diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index ac1c24d10..200f9513f 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -a8f547d3728fba835fbdda301e846829c5cbbef5 \ No newline at end of file +633dafff1aa6f0198a576cf83bfa81b2b4f27d46 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 44b3782e1..06981143e 100755 --- a/.gitattributes +++ b/.gitattributes @@ -23,13 +23,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Tempora databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelOptimizeResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CreateCustomLlmRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/DeleteCustomLlmRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/DeleteCustomLlmResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java linguist-generated=true @@ -206,6 +209,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareA databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java linguist-generated=true @@ -232,6 +236,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchem databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java linguist-generated=true @@ -414,6 +419,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpecOauthStage.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpecOptionType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java linguist-generated=true @@ -443,6 +451,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo. 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java linguist-generated=true @@ -817,6 +827,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSu databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardView.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteConversationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true @@ -824,20 +835,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSu databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationSummary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java 
linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java linguist-generated=true @@ -847,6 +858,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSer databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true @@ -882,12 +894,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrip databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashSpaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java linguist-generated=true @@ -895,6 +909,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCa databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRef.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleAttributes.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleIdentityType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleMembershipRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java linguist-generated=true @@ -902,16 +921,22 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteData databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeltaTableSyncInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRoleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true @@ -920,6 +945,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedData databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePosition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java linguist-generated=true @@ -1120,10 +1146,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageS databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtPlatformJobRunStep.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtPlatformRunStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtPlatformTask.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtPlatformTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java linguist-generated=true @@ -1460,6 +1489,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateExperimentResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateFeatureTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java linguist-generated=true @@ -1468,6 +1498,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelReque databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateOnlineStoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRunResponse.java linguist-generated=true @@ -1480,6 +1511,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentReq databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureTagRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java linguist-generated=true @@ -1492,6 +1525,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersi databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineStoreRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineStoreResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java linguist-generated=true @@ -1515,6 +1550,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineage.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageFeatureSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageModel.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageOnlineFeature.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureTag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java linguist-generated=true @@ -1530,6 +1573,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPer databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetFeatureLineageRequest.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetFeatureTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequest.java linguist-generated=true @@ -1543,6 +1588,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionD databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionDownloadUriResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelVersionResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetOnlineStoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionLevelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRegisteredModelPermissionsRequest.java linguist-generated=true @@ -1557,8 +1603,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsReq databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeatureTagsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeatureTagsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListOnlineStoresRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListOnlineStoresResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooks.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponse.java linguist-generated=true @@ -1583,6 +1633,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo. 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Metric.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Model.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java linguist-generated=true @@ -1596,8 +1649,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersion.jav databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTag.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStore.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStoreState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpecPublishMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishTableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java linguist-generated=true @@ -1658,10 +1717,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRunResponse.java linguist-generated=true @@ -1788,6 +1849,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineS databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfoHealth.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineTrigger.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true @@ -2681,6 +2743,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualization.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateWidgetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/User.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Visualization.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 5539142bf..47776d949 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,58 @@ ### Internal Changes ### API Changes +* Added `workspaceClient.aiBuilder()` service. +* Added `workspaceClient.featureStore()` service and `workspaceClient.materializedFeatures()` service. +* Added `deleteConversation()`, `listConversations()` and `trashSpace()` methods for `workspaceClient.genie()` service. +* Added `createDatabaseInstanceRole()`, `deleteDatabaseInstanceRole()`, `getDatabaseInstanceRole()` and `listDatabaseInstanceRoles()` methods for `workspaceClient.database()` service. +* Added `connection` and `credential` fields for `com.databricks.sdk.service.catalog.Dependency`. +* Added `rely` field for `com.databricks.sdk.service.catalog.ForeignKeyConstraint`. +* Added `rely` field for `com.databricks.sdk.service.catalog.PrimaryKeyConstraint`. +* Added `securableKindManifest` field for `com.databricks.sdk.service.catalog.TableInfo`. 
+* Added `securableKindManifest` field for `com.databricks.sdk.service.catalog.TableSummary`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.ClusterAttributes`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.ClusterDetails`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.ClusterSpec`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.CreateCluster`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.CreateInstancePool`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.EditCluster`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.EditInstancePool`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.GetInstancePool`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.InstancePoolAndStats`. +* Added `remoteDiskThroughput` and `totalInitialRemoteDiskSize` fields for `com.databricks.sdk.service.compute.UpdateClusterResource`. +* Added `expirationTime` field for `com.databricks.sdk.service.database.DatabaseCredential`. +* Added `childInstanceRefs`, `effectiveEnableReadableSecondaries`, `effectiveNodeCount`, `effectiveRetentionWindowInDays`, `effectiveStopped`, `enableReadableSecondaries`, `nodeCount`, `parentInstanceRef`, `readOnlyDns` and `retentionWindowInDays` fields for `com.databricks.sdk.service.database.DatabaseInstance`. +* Added `existingPipelineId` field for `com.databricks.sdk.service.database.SyncedTableSpec`. +* Added `lastSync` and `pipelineId` fields for `com.databricks.sdk.service.database.SyncedTableStatus`. +* Added `dbtPlatformOutput` field for `com.databricks.sdk.service.jobs.RunOutput`. +* Added `dbtPlatformTask` field for `com.databricks.sdk.service.jobs.RunTask`. +* Added `dbtPlatformTask` field for `com.databricks.sdk.service.jobs.SubmitTask`. +* Added `dbtPlatformTask` field for `com.databricks.sdk.service.jobs.Task`. +* Added `environment` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. +* Added `environment` field for `com.databricks.sdk.service.pipelines.EditPipeline`. +* Added `environment` field for `com.databricks.sdk.service.pipelines.PipelineSpec`. +* Added `description` field for `com.databricks.sdk.service.serving.CreateServingEndpoint`. +* Added `servedEntityName` field for `com.databricks.sdk.service.serving.Route`. +* Added `description` field for `com.databricks.sdk.service.serving.ServingEndpoint`. +* Added `description` field for `com.databricks.sdk.service.serving.ServingEndpointDetailed`. +* Added `DATABRICKS_ROW_STORE_FORMAT`, `DELTA_UNIFORM_HUDI`, `DELTA_UNIFORM_ICEBERG`, `HIVE`, `ICEBERG`, `MONGODB_FORMAT`, `ORACLE_FORMAT`, `SALESFORCE_DATA_CLOUD_FORMAT` and `TERADATA_FORMAT` enum values for `com.databricks.sdk.service.catalog.DataSourceFormat`. +* Added `METRIC_VIEW` enum value for `com.databricks.sdk.service.catalog.TableType`. +* Added `R` enum value for `com.databricks.sdk.service.compute.Language`. +* Added `CANCELLED`, `ERROR`, `QUEUED`, `RUNNING`, `STARTING` and `SUCCESS` enum values for `com.databricks.sdk.service.jobs.DbtPlatformRunStatus`. 
+* Added `CONTINUOUS` and `CONTINUOUS_RESTART` enum values for `com.databricks.sdk.service.jobs.TriggerType`. +* Added `APPEND_ONLY` enum value for `com.databricks.sdk.service.pipelines.TableSpecificConfigScdType`. +* [Breaking] Changed `update()` method for `workspaceClient.dashboardWidgets()` service . New request type is `com.databricks.sdk.service.sql.UpdateWidgetRequest` class. +* [Breaking] Changed `create()` method for `workspaceClient.queryVisualizationsLegacy()` service with new required argument order. +* [Breaking] Changed `status` field for `com.databricks.sdk.service.jobs.DbtCloudJobRunStep` to type `com.databricks.sdk.service.jobs.DbtPlatformRunStatus` class. +* Changed `servedModelName` field for `com.databricks.sdk.service.serving.Route` to no longer be required. +* [Breaking] Changed `servedModelName` field for `com.databricks.sdk.service.serving.Route` to no longer be required. +* [Breaking] Removed `workspaceClient.customLlms()` service. +* [Breaking] Removed `generateDownloadFullQueryResult()` and `getDownloadFullQueryResult()` methods for `workspaceClient.genie()` service. +* [Breaking] Removed `includeDeltaMetadata` field for `com.databricks.sdk.service.catalog.ListTablesRequest`. +* [Breaking] Removed `tableServingUrl` field for `com.databricks.sdk.service.database.DatabaseTable`. +* [Breaking] Removed `tableServingUrl` field for `com.databricks.sdk.service.database.SyncedDatabaseTable`. +* [Breaking] Removed `pipelineId` field for `com.databricks.sdk.service.database.SyncedTableSpec`. +* [Breaking] Removed `UNKNOWN_CATALOG_TYPE` enum value for `com.databricks.sdk.service.catalog.CatalogType`. +* [Breaking] Removed `HIVE_CUSTOM` and `HIVE_SERDE` enum values for `com.databricks.sdk.service.catalog.DataSourceFormat`. +* [Breaking] Removed `UNKNOWN_SECURABLE_TYPE` enum value for `com.databricks.sdk.service.catalog.SecurableType`. +* [Breaking] Removed `CANCELLED`, `ERROR`, `QUEUED`, `RUNNING`, `STARTING` and `SUCCESS` enum values for `com.databricks.sdk.service.jobs.DbtCloudRunStatus`. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java index 8e635b302..a8a5e6435 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java @@ -305,8 +305,64 @@ public AccountIpAccessListsAPI ipAccessLists() { } /** - * These APIs manage Log delivery configurations for this account. Log delivery configs enable you - * to configure the delivery of the specified type of logs to your storage account. + * These APIs manage log delivery configurations for this account. The two supported log types for + * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This + * feature works with all account ID types. + * + *
Log delivery works with all account types. However, if your account is on the E2 version of + * the platform or on a select custom plan that allows multiple workspaces per account, you can + * optionally configure different storage destinations for each workspace. Log delivery status is + * also provided to know the latest status of log delivery attempts. + * + *
The high-level flow of billable usage delivery: + * + *
1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. + * Using Databricks APIs, call the Account API to create a [storage configuration + * object](:method:Storage/Create) that uses the bucket name. + * + *
2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For full details, + * including the required IAM role policies and trust relationship, see [Billable usage log + * delivery]. Using Databricks APIs, call the Account API to create a [credential configuration + * object](:method:Credentials/Create) that uses the IAM role's ARN. + * + *
3. **Create log delivery configuration**: Using Databricks APIs, call the Account API to + * [create a log delivery configuration](:method:LogDelivery/Create) that uses the credential and + * storage configuration objects from previous steps. You can specify if the logs should include + * all events of that log type in your account (_Account level_ delivery) or only events for a + * specific set of workspaces (_workspace level_ delivery). Account level log delivery applies to + * all current and future workspaces plus account level logs, while workspace level log delivery + * solely delivers logs related to the specified workspaces. You can create multiple types of + * delivery configurations per account. + * + *
For billable usage delivery: * For more information about billable usage logs, see [Billable
+ * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
+ * ` For audit log delivery: * For more information about about audit log delivery, see [Audit
+ * log delivery], which includes information about the JSON schema used. * The delivery location
+ * is
+ * ` [Audit log delivery]:
+ * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable
+ * usage log delivery]:
+ * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
+ * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
+ * [create a new AWS S3 bucket]:
+ * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*/
public LogDeliveryAPI logDelivery() {
return logDeliveryAPI;
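To make the three-step flow described in the Javadoc above concrete, here is a hedged end-to-end sketch against the Account client. The storage(), credentials() and logDelivery() accessors exist on AccountClient; the request/response class names below (CreateStorageConfigurationRequest, RootBucketInfo, CreateCredentialRequest, CreateCredentialAwsCredentials, CreateCredentialStsRole, WrappedCreateLogDeliveryConfiguration, CreateLogDeliveryConfigurationParams) are assumptions taken from the generated provisioning and billing packages and should be verified against the generated sources; the bucket name and role ARN are placeholders.

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.billing.CreateLogDeliveryConfigurationParams;
import com.databricks.sdk.service.billing.LogType;
import com.databricks.sdk.service.billing.OutputFormat;
import com.databricks.sdk.service.billing.WrappedCreateLogDeliveryConfiguration;
import com.databricks.sdk.service.provisioning.CreateCredentialAwsCredentials;
import com.databricks.sdk.service.provisioning.CreateCredentialRequest;
import com.databricks.sdk.service.provisioning.CreateCredentialStsRole;
import com.databricks.sdk.service.provisioning.CreateStorageConfigurationRequest;
import com.databricks.sdk.service.provisioning.RootBucketInfo;

public class BillableUsageDeliverySketch {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();

    // 1. Storage configuration that points at the S3 bucket prepared for log delivery.
    String storageId =
        account
            .storage()
            .create(
                new CreateStorageConfigurationRequest()
                    .setStorageConfigurationName("log-delivery-bucket")
                    .setRootBucketInfo(new RootBucketInfo().setBucketName("my-log-bucket")))
            .getStorageConfigurationId();

    // 2. Credential configuration wrapping the IAM role allowed to write to that bucket.
    String credentialsId =
        account
            .credentials()
            .create(
                new CreateCredentialRequest()
                    .setCredentialsName("log-delivery-role")
                    .setAwsCredentials(
                        new CreateCredentialAwsCredentials()
                            .setStsRole(
                                new CreateCredentialStsRole()
                                    .setRoleArn("arn:aws:iam::123456789012:role/log-delivery"))))
            .getCredentialsId();

    // 3. Account-level billable usage delivery that wires the two objects together.
    account
        .logDelivery()
        .create(
            new WrappedCreateLogDeliveryConfiguration()
                .setLogDeliveryConfiguration(
                    new CreateLogDeliveryConfigurationParams()
                        .setConfigName("usage-logs")
                        .setLogType(LogType.BILLABLE_USAGE)
                        .setOutputFormat(OutputFormat.CSV)
                        .setCredentialsId(credentialsId)
                        .setStorageConfigurationId(storageId)));
  }
}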
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index bf50805dc..bf4f6e180 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -8,8 +8,8 @@
import com.databricks.sdk.mixin.ClustersExt;
import com.databricks.sdk.mixin.DbfsExt;
import com.databricks.sdk.mixin.SecretsExt;
-import com.databricks.sdk.service.aibuilder.CustomLlmsAPI;
-import com.databricks.sdk.service.aibuilder.CustomLlmsService;
+import com.databricks.sdk.service.aibuilder.AiBuilderAPI;
+import com.databricks.sdk.service.aibuilder.AiBuilderService;
import com.databricks.sdk.service.apps.AppsAPI;
import com.databricks.sdk.service.apps.AppsService;
import com.databricks.sdk.service.catalog.ArtifactAllowlistsAPI;
@@ -134,8 +134,12 @@
import com.databricks.sdk.service.marketplace.ProviderProvidersService;
import com.databricks.sdk.service.ml.ExperimentsAPI;
import com.databricks.sdk.service.ml.ExperimentsService;
+import com.databricks.sdk.service.ml.FeatureStoreAPI;
+import com.databricks.sdk.service.ml.FeatureStoreService;
import com.databricks.sdk.service.ml.ForecastingAPI;
import com.databricks.sdk.service.ml.ForecastingService;
+import com.databricks.sdk.service.ml.MaterializedFeaturesAPI;
+import com.databricks.sdk.service.ml.MaterializedFeaturesService;
import com.databricks.sdk.service.ml.ModelRegistryAPI;
import com.databricks.sdk.service.ml.ModelRegistryService;
import com.databricks.sdk.service.pipelines.PipelinesAPI;
@@ -221,6 +225,7 @@ public class WorkspaceClient {
private AccessControlAPI accessControlAPI;
private AccountAccessControlProxyAPI accountAccessControlProxyAPI;
+ private AiBuilderAPI aiBuilderAPI;
private AlertsAPI alertsAPI;
private AlertsLegacyAPI alertsLegacyAPI;
private AlertsV2API alertsV2API;
@@ -242,7 +247,6 @@ public class WorkspaceClient {
private CredentialsAPI credentialsAPI;
private CredentialsManagerAPI credentialsManagerAPI;
private CurrentUserAPI currentUserAPI;
- private CustomLlmsAPI customLlmsAPI;
private DashboardWidgetsAPI dashboardWidgetsAPI;
private DashboardsAPI dashboardsAPI;
private DataSourcesAPI dataSourcesAPI;
@@ -251,6 +255,7 @@ public class WorkspaceClient {
private DbsqlPermissionsAPI dbsqlPermissionsAPI;
private ExperimentsAPI experimentsAPI;
private ExternalLocationsAPI externalLocationsAPI;
+ private FeatureStoreAPI featureStoreAPI;
private FilesAPI filesAPI;
private FunctionsAPI functionsAPI;
private GenieAPI genieAPI;
@@ -265,6 +270,7 @@ public class WorkspaceClient {
private LakeviewAPI lakeviewAPI;
private LakeviewEmbeddedAPI lakeviewEmbeddedAPI;
private LibrariesAPI librariesAPI;
+ private MaterializedFeaturesAPI materializedFeaturesAPI;
private MetastoresAPI metastoresAPI;
private ModelRegistryAPI modelRegistryAPI;
private ModelVersionsAPI modelVersionsAPI;
@@ -332,6 +338,7 @@ public WorkspaceClient(DatabricksConfig config) {
apiClient = new ApiClient(config);
accessControlAPI = new AccessControlAPI(apiClient);
accountAccessControlProxyAPI = new AccountAccessControlProxyAPI(apiClient);
+ aiBuilderAPI = new AiBuilderAPI(apiClient);
alertsAPI = new AlertsAPI(apiClient);
alertsLegacyAPI = new AlertsLegacyAPI(apiClient);
alertsV2API = new AlertsV2API(apiClient);
@@ -353,7 +360,6 @@ public WorkspaceClient(DatabricksConfig config) {
credentialsAPI = new CredentialsAPI(apiClient);
credentialsManagerAPI = new CredentialsManagerAPI(apiClient);
currentUserAPI = new CurrentUserAPI(apiClient);
- customLlmsAPI = new CustomLlmsAPI(apiClient);
dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient);
dashboardsAPI = new DashboardsAPI(apiClient);
dataSourcesAPI = new DataSourcesAPI(apiClient);
@@ -362,6 +368,7 @@ public WorkspaceClient(DatabricksConfig config) {
dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient);
experimentsAPI = new ExperimentsAPI(apiClient);
externalLocationsAPI = new ExternalLocationsAPI(apiClient);
+ featureStoreAPI = new FeatureStoreAPI(apiClient);
filesAPI = new FilesAPI(apiClient);
functionsAPI = new FunctionsAPI(apiClient);
genieAPI = new GenieAPI(apiClient);
@@ -376,6 +383,7 @@ public WorkspaceClient(DatabricksConfig config) {
lakeviewAPI = new LakeviewAPI(apiClient);
lakeviewEmbeddedAPI = new LakeviewEmbeddedAPI(apiClient);
librariesAPI = new LibrariesAPI(apiClient);
+ materializedFeaturesAPI = new MaterializedFeaturesAPI(apiClient);
metastoresAPI = new MetastoresAPI(apiClient);
modelRegistryAPI = new ModelRegistryAPI(apiClient);
modelVersionsAPI = new ModelVersionsAPI(apiClient);
@@ -462,6 +470,11 @@ public AccountAccessControlProxyAPI accountAccessControlProxy() {
return accountAccessControlProxyAPI;
}
+ /** The Custom LLMs service manages state and powers the UI for the Custom LLM product. */
+ public AiBuilderAPI aiBuilder() {
+ return aiBuilderAPI;
+ }
+
/**
* The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL
* object that periodically runs a query, evaluates a condition of its result, and notifies one or
@@ -680,11 +693,6 @@ public CurrentUserAPI currentUser() {
return currentUserAPI;
}
- /** The Custom LLMs service manages state and powers the UI for the Custom LLM product. */
- public CustomLlmsAPI customLlms() {
- return customLlmsAPI;
- }
-
/**
* This is an evolving API that facilitates the addition and removal of widgets from existing
* dashboards within the Databricks Workspace. Data structures may change over time.
@@ -787,6 +795,18 @@ public ExternalLocationsAPI externalLocations() {
return externalLocationsAPI;
}
+ /**
+ * A feature store is a centralized repository that enables data scientists to find and share
+ * features. Using a feature store also ensures that the code used to compute feature values is
+ * the same during model training and when the model is used for inference.
+ *
+ * An online store is a low-latency database used for feature lookup during real-time model
+ * inference or to serve features for real-time applications.
+ */
+ public FeatureStoreAPI featureStore() {
+ return featureStoreAPI;
+ }
+
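A brief usage sketch for the new accessor. The getOnlineStore method name, the GetOnlineStoreRequest.setName setter and the OnlineStore.getState accessor are assumptions inferred from the generated request types registered in .gitattributes above; check the generated FeatureStoreAPI for the exact signatures, and treat the store name as a placeholder.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.GetOnlineStoreRequest;
import com.databricks.sdk.service.ml.OnlineStore;

public class FeatureStoreSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Look up an existing online store used for low-latency feature serving (names assumed).
    OnlineStore store =
        w.featureStore().getOnlineStore(new GetOnlineStoreRequest().setName("main-online-store"));
    System.out.println(store.getState());
  }
}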
/**
* The Files API is a standard HTTP API that allows you to read, write, list, and delete files and
* directories by referring to their URI. The API makes working with file content as raw bytes
@@ -1003,6 +1023,14 @@ public LibrariesAPI libraries() {
return librariesAPI;
}
+ /**
+ * Materialized Features are columns in tables and views that can be directly used as features to
+ * train and serve ML models.
+ */
+ public MaterializedFeaturesAPI materializedFeatures() {
+ return materializedFeaturesAPI;
+ }
+
/**
* A metastore is the top-level container of objects in Unity Catalog. It stores data assets
* (tables and views) and the permissions that govern access to them. Databricks account admins
@@ -1809,6 +1837,17 @@ public WorkspaceClient withAccountAccessControlProxyAPI(
return this;
}
+ /** Replace the default AiBuilderService with a custom implementation. */
+ public WorkspaceClient withAiBuilderImpl(AiBuilderService aiBuilder) {
+ return this.withAiBuilderAPI(new AiBuilderAPI(aiBuilder));
+ }
+
+ /** Replace the default AiBuilderAPI with a custom implementation. */
+ public WorkspaceClient withAiBuilderAPI(AiBuilderAPI aiBuilder) {
+ this.aiBuilderAPI = aiBuilder;
+ return this;
+ }
+
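Since these hooks exist mainly for testing, here is a minimal sketch of swapping in a test double; fakeAiBuilder stands for any caller-provided AiBuilderService implementation (hypothetical name).

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.core.DatabricksConfig;
import com.databricks.sdk.service.aibuilder.AiBuilderService;

public class AiBuilderTestWiring {
  static WorkspaceClient withFake(DatabricksConfig config, AiBuilderService fakeAiBuilder) {
    // Every later call to aiBuilder() is served by the supplied test double instead of the HTTP impl.
    return new WorkspaceClient(config).withAiBuilderImpl(fakeAiBuilder);
  }
}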
/** Replace the default AlertsService with a custom implementation. */
public WorkspaceClient withAlertsImpl(AlertsService alerts) {
return this.withAlertsAPI(new AlertsAPI(alerts));
@@ -2046,17 +2085,6 @@ public WorkspaceClient withCurrentUserAPI(CurrentUserAPI currentUser) {
return this;
}
- /** Replace the default CustomLlmsService with a custom implementation. */
- public WorkspaceClient withCustomLlmsImpl(CustomLlmsService customLlms) {
- return this.withCustomLlmsAPI(new CustomLlmsAPI(customLlms));
- }
-
- /** Replace the default CustomLlmsAPI with a custom implementation. */
- public WorkspaceClient withCustomLlmsAPI(CustomLlmsAPI customLlms) {
- this.customLlmsAPI = customLlms;
- return this;
- }
-
/** Replace the default DashboardWidgetsService with a custom implementation. */
public WorkspaceClient withDashboardWidgetsImpl(DashboardWidgetsService dashboardWidgets) {
return this.withDashboardWidgetsAPI(new DashboardWidgetsAPI(dashboardWidgets));
@@ -2145,6 +2173,17 @@ public WorkspaceClient withExternalLocationsAPI(ExternalLocationsAPI externalLoc
return this;
}
+ /** Replace the default FeatureStoreService with a custom implementation. */
+ public WorkspaceClient withFeatureStoreImpl(FeatureStoreService featureStore) {
+ return this.withFeatureStoreAPI(new FeatureStoreAPI(featureStore));
+ }
+
+ /** Replace the default FeatureStoreAPI with a custom implementation. */
+ public WorkspaceClient withFeatureStoreAPI(FeatureStoreAPI featureStore) {
+ this.featureStoreAPI = featureStore;
+ return this;
+ }
+
/** Replace the default FilesService with a custom implementation. */
public WorkspaceClient withFilesImpl(FilesService files) {
return this.withFilesAPI(new FilesAPI(files));
@@ -2299,6 +2338,18 @@ public WorkspaceClient withLibrariesAPI(LibrariesAPI libraries) {
return this;
}
+ /** Replace the default MaterializedFeaturesService with a custom implementation. */
+ public WorkspaceClient withMaterializedFeaturesImpl(
+ MaterializedFeaturesService materializedFeatures) {
+ return this.withMaterializedFeaturesAPI(new MaterializedFeaturesAPI(materializedFeatures));
+ }
+
+ /** Replace the default MaterializedFeaturesAPI with a custom implementation. */
+ public WorkspaceClient withMaterializedFeaturesAPI(MaterializedFeaturesAPI materializedFeatures) {
+ this.materializedFeaturesAPI = materializedFeatures;
+ return this;
+ }
+
/** Replace the default MetastoresService with a custom implementation. */
public WorkspaceClient withMetastoresImpl(MetastoresService metastores) {
return this.withMetastoresAPI(new MetastoresAPI(metastores));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderAPI.java
new file mode 100755
index 000000000..4f1609be3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderAPI.java
@@ -0,0 +1,85 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** The Custom LLMs service manages state and powers the UI for the Custom LLM product. */
+@Generated
+public class AiBuilderAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(AiBuilderAPI.class);
+
+ private final AiBuilderService impl;
+
+ /** Regular-use constructor */
+ public AiBuilderAPI(ApiClient apiClient) {
+ impl = new AiBuilderImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public AiBuilderAPI(AiBuilderService mock) {
+ impl = mock;
+ }
+
+ public void cancelOptimize(String id) {
+ cancelOptimize(new CancelCustomLlmOptimizationRunRequest().setId(id));
+ }
+
+ /** Cancel a Custom LLM Optimization Run. */
+ public void cancelOptimize(CancelCustomLlmOptimizationRunRequest request) {
+ impl.cancelOptimize(request);
+ }
+
+ public CustomLlm createCustomLlm(String name, String instructions) {
+ return createCustomLlm(
+ new CreateCustomLlmRequest().setName(name).setInstructions(instructions));
+ }
+
+ /** Create a Custom LLM. */
+ public CustomLlm createCustomLlm(CreateCustomLlmRequest request) {
+ return impl.createCustomLlm(request);
+ }
+
+ public void deleteCustomLlm(String id) {
+ deleteCustomLlm(new DeleteCustomLlmRequest().setId(id));
+ }
+
+ /** Delete a Custom LLM. */
+ public void deleteCustomLlm(DeleteCustomLlmRequest request) {
+ impl.deleteCustomLlm(request);
+ }
+
+ public CustomLlm getCustomLlm(String id) {
+ return getCustomLlm(new GetCustomLlmRequest().setId(id));
+ }
+
+ /** Get a Custom LLM. */
+ public CustomLlm getCustomLlm(GetCustomLlmRequest request) {
+ return impl.getCustomLlm(request);
+ }
+
+ public CustomLlm startOptimize(String id) {
+ return startOptimize(new StartCustomLlmOptimizationRunRequest().setId(id));
+ }
+
+ /** Start a Custom LLM Optimization Run. */
+ public CustomLlm startOptimize(StartCustomLlmOptimizationRunRequest request) {
+ return impl.startOptimize(request);
+ }
+
+ public CustomLlm updateCustomLlm(String id, CustomLlm customLlm, String updateMask) {
+ return updateCustomLlm(
+ new UpdateCustomLlmRequest().setId(id).setCustomLlm(customLlm).setUpdateMask(updateMask));
+ }
+
+ /** Update a Custom LLM. */
+ public CustomLlm updateCustomLlm(UpdateCustomLlmRequest request) {
+ return impl.updateCustomLlm(request);
+ }
+
+ public AiBuilderService impl() {
+ return impl;
+ }
+}
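A short usage sketch of the surface above. The aiBuilder() accessor on WorkspaceClient and the getId()/getState() getters on CustomLlm are assumed here; only the method names shown in this file are confirmed by the diff.

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.aibuilder.CustomLlm;

    public class AiBuilderUsageSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient(); // resolves auth from the environment

        // Create a custom LLM, start an optimization run, inspect it, then cancel it.
        CustomLlm llm =
            w.aiBuilder().createCustomLlm("support-triage", "Classify support tickets by severity.");
        CustomLlm started = w.aiBuilder().startOptimize(llm.getId());    // getId() assumed
        CustomLlm current = w.aiBuilder().getCustomLlm(started.getId());
        System.out.println("optimization state: " + current.getState()); // getState() assumed
        w.aiBuilder().cancelOptimize(current.getId());
      }
    }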
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java
similarity index 60%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java
index e954adaa6..4b7e6f09c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java
@@ -7,32 +7,32 @@
import com.databricks.sdk.support.Generated;
import java.io.IOException;
-/** Package-local implementation of CustomLlms */
+/** Package-local implementation of AiBuilder */
@Generated
-class CustomLlmsImpl implements CustomLlmsService {
+class AiBuilderImpl implements AiBuilderService {
private final ApiClient apiClient;
- public CustomLlmsImpl(ApiClient apiClient) {
+ public AiBuilderImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
@Override
- public void cancel(CancelCustomLlmOptimizationRunRequest request) {
+ public void cancelOptimize(CancelCustomLlmOptimizationRunRequest request) {
String path = String.format("/api/2.0/custom-llms/%s/optimize/cancel", request.getId());
try {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, CancelResponse.class);
+ apiClient.execute(req, CancelOptimizeResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public CustomLlm create(StartCustomLlmOptimizationRunRequest request) {
- String path = String.format("/api/2.0/custom-llms/%s/optimize", request.getId());
+ public CustomLlm createCustomLlm(CreateCustomLlmRequest request) {
+ String path = "/api/2.0/custom-llms";
try {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
@@ -45,7 +45,20 @@ public CustomLlm create(StartCustomLlmOptimizationRunRequest request) {
}
@Override
- public CustomLlm get(GetCustomLlmRequest request) {
+ public void deleteCustomLlm(DeleteCustomLlmRequest request) {
+ String path = String.format("/api/2.0/custom-lms/%s", request.getId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, DeleteCustomLlmResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public CustomLlm getCustomLlm(GetCustomLlmRequest request) {
String path = String.format("/api/2.0/custom-llms/%s", request.getId());
try {
Request req = new Request("GET", path);
@@ -58,7 +71,21 @@ public CustomLlm get(GetCustomLlmRequest request) {
}
@Override
- public CustomLlm update(UpdateCustomLlmRequest request) {
+ public CustomLlm startOptimize(StartCustomLlmOptimizationRunRequest request) {
+ String path = String.format("/api/2.0/custom-llms/%s/optimize", request.getId());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, CustomLlm.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public CustomLlm updateCustomLlm(UpdateCustomLlmRequest request) {
String path = String.format("/api/2.0/custom-llms/%s", request.getId());
try {
Request req = new Request("PATCH", path, apiClient.serialize(request));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderService.java
similarity index 50%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderService.java
index 5f4b4246c..33129cd1a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderService.java
@@ -11,16 +11,23 @@
* Evolving: this interface is under development. Method signatures may change.
*/
@Generated
-public interface CustomLlmsService {
+public interface AiBuilderService {
/** Cancel a Custom LLM Optimization Run. */
- void cancel(CancelCustomLlmOptimizationRunRequest cancelCustomLlmOptimizationRunRequest);
+ void cancelOptimize(CancelCustomLlmOptimizationRunRequest cancelCustomLlmOptimizationRunRequest);
- /** Start a Custom LLM Optimization Run. */
- CustomLlm create(StartCustomLlmOptimizationRunRequest startCustomLlmOptimizationRunRequest);
+ /** Create a Custom LLM. */
+ CustomLlm createCustomLlm(CreateCustomLlmRequest createCustomLlmRequest);
+
+ /** Delete a Custom LLM. */
+ void deleteCustomLlm(DeleteCustomLlmRequest deleteCustomLlmRequest);
/** Get a Custom LLM. */
- CustomLlm get(GetCustomLlmRequest getCustomLlmRequest);
+ CustomLlm getCustomLlm(GetCustomLlmRequest getCustomLlmRequest);
+
+ /** Start a Custom LLM Optimization Run. */
+ CustomLlm startOptimize(
+ StartCustomLlmOptimizationRunRequest startCustomLlmOptimizationRunRequest);
/** Update a Custom LLM. */
- CustomLlm update(UpdateCustomLlmRequest updateCustomLlmRequest);
+ CustomLlm updateCustomLlm(UpdateCustomLlmRequest updateCustomLlmRequest);
}
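Because the renamed interface is small, a hand-rolled fake is often enough for tests. A sketch of one follows; the class is illustrative and not part of the SDK, and the getters on the generated request classes are assumed to mirror the setters shown elsewhere in this diff.

    import com.databricks.sdk.service.aibuilder.AiBuilderService;
    import com.databricks.sdk.service.aibuilder.CancelCustomLlmOptimizationRunRequest;
    import com.databricks.sdk.service.aibuilder.CreateCustomLlmRequest;
    import com.databricks.sdk.service.aibuilder.CustomLlm;
    import com.databricks.sdk.service.aibuilder.DeleteCustomLlmRequest;
    import com.databricks.sdk.service.aibuilder.GetCustomLlmRequest;
    import com.databricks.sdk.service.aibuilder.StartCustomLlmOptimizationRunRequest;
    import com.databricks.sdk.service.aibuilder.UpdateCustomLlmRequest;
    import java.util.HashMap;
    import java.util.Map;

    // Illustrative in-memory fake; the real service keys custom LLMs by a server-generated id.
    class InMemoryAiBuilderService implements AiBuilderService {
      private final Map<String, CustomLlm> store = new HashMap<>();

      @Override
      public void cancelOptimize(CancelCustomLlmOptimizationRunRequest request) {
        // no-op: a real implementation would flip the run's state
      }

      @Override
      public CustomLlm createCustomLlm(CreateCustomLlmRequest request) {
        CustomLlm llm = new CustomLlm();
        store.put(request.getName(), llm); // keyed by name for simplicity
        return llm;
      }

      @Override
      public void deleteCustomLlm(DeleteCustomLlmRequest request) {
        store.remove(request.getId());
      }

      @Override
      public CustomLlm getCustomLlm(GetCustomLlmRequest request) {
        return store.get(request.getId());
      }

      @Override
      public CustomLlm startOptimize(StartCustomLlmOptimizationRunRequest request) {
        return store.get(request.getId());
      }

      @Override
      public CustomLlm updateCustomLlm(UpdateCustomLlmRequest request) {
        store.put(request.getId(), request.getCustomLlm());
        return request.getCustomLlm();
      }
    }

Such a fake can be wired in with workspaceClient.withAiBuilderAPI(new AiBuilderAPI(fake)), mirroring the WorkspaceClient change earlier in this diff.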
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelOptimizeResponse.java
similarity index 83%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelOptimizeResponse.java
index 62f4aac5b..d9199de50 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelOptimizeResponse.java
@@ -7,7 +7,7 @@
import java.util.Objects;
@Generated
-public class CancelResponse {
+public class CancelOptimizeResponse {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(CancelResponse.class).toString();
+ return new ToStringer(CancelOptimizeResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CreateCustomLlmRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CreateCustomLlmRequest.java
new file mode 100755
index 000000000..4d7e192cc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CreateCustomLlmRequest.java
@@ -0,0 +1,112 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CreateCustomLlmRequest {
+ /**
+ * Optional: UC path for agent artifacts. If you are using a dataset to which you only have read
+ * permissions, provide a destination path where you have write permissions, in catalog.schema
+ * format.
+ */
+ @JsonProperty("agent_artifact_path")
+ private String agentArtifactPath;
+
+ /**
+ * Datasets used for training and evaluating the model, not for inference. Currently, only 1
+ * dataset is accepted.
+ */
+ @JsonProperty("datasets")
+ private Collection<Dataset> datasets;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
- /**
- * Create an app.
- *
- * Creates a new app.
- */
+ /** Creates a new app. */
public Wait<App, App> create(CreateAppRequest request) {
- /**
- * Delete an app.
- *
- * Deletes an app.
- */
+ /** Deletes an app. */
public App delete(DeleteAppRequest request) {
return impl.delete(request);
}
@@ -212,11 +204,7 @@
- /**
- * Create an app deployment.
- *
- * Creates an app deployment for the app with the supplied name.
- */
+ /** Creates an app deployment for the app with the supplied name. */
public Wait<AppDeployment, AppDeployment> deploy(CreateAppDeploymentRequest request) {
- /**
- * Get an app.
- *
- * Retrieves information for the app with the supplied name.
- */
+ /** Retrieves information for the app with the supplied name. */
public App get(GetAppRequest request) {
return impl.get(request);
}
@@ -244,11 +228,7 @@ public AppDeployment getDeployment(String appName, String deploymentId) {
new GetAppDeploymentRequest().setAppName(appName).setDeploymentId(deploymentId));
}
- /**
- * Get an app deployment.
- *
- * Retrieves information for the app deployment with the supplied name and deployment id.
- */
+ /** Retrieves information for the app deployment with the supplied name and deployment id. */
public AppDeployment getDeployment(GetAppDeploymentRequest request) {
return impl.getDeployment(request);
}
@@ -257,11 +237,7 @@ public GetAppPermissionLevelsResponse getPermissionLevels(String appName) {
return getPermissionLevels(new GetAppPermissionLevelsRequest().setAppName(appName));
}
- /**
- * Get app permission levels.
- *
- * Gets the permission levels that a user can have on an object.
- */
+ /** Gets the permission levels that a user can have on an object. */
public GetAppPermissionLevelsResponse getPermissionLevels(GetAppPermissionLevelsRequest request) {
return impl.getPermissionLevels(request);
}
@@ -270,20 +246,12 @@ public AppPermissions getPermissions(String appName) {
return getPermissions(new GetAppPermissionsRequest().setAppName(appName));
}
- /**
- * Get app permissions.
- *
- * Gets the permissions of an app. Apps can inherit permissions from their root object.
- */
+ /** Gets the permissions of an app. Apps can inherit permissions from their root object. */
public AppPermissions getPermissions(GetAppPermissionsRequest request) {
return impl.getPermissions(request);
}
- /**
- * List apps.
- *
- * Lists all apps in the workspace.
- */
+ /** Lists all apps in the workspace. */
public Iterable<App> list(ListAppsRequest request) {
- /**
- * List app deployments.
- *
- * Lists all app deployments for the app with the supplied name.
- */
+ /** Lists all app deployments for the app with the supplied name. */
public Iterable<AppDeployment> listDeployments(ListAppDeploymentsRequest request) {
/**
- * Set app permissions.
- *
- * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
- * direct permissions if none are specified. Objects can inherit permissions from their root
- * object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
*/
public AppPermissions setPermissions(AppPermissionsRequest request) {
return impl.setPermissions(request);
@@ -340,11 +301,7 @@
- /**
- * Start an app.
- *
- * Start the last active deployment of the app in the workspace.
- */
+ /** Start the last active deployment of the app in the workspace. */
public Wait<App, App> start(StartAppRequest request) {
- /**
- * Stop an app.
- *
- * Stops the active deployment of the app in the workspace.
- */
+ /** Stops the active deployment of the app in the workspace. */
public Wait<App, App> stop(StopAppRequest request) {
- /**
- * Update an app.
- *
- * Updates the app with the supplied name.
- */
+ /** Updates the app with the supplied name. */
public App update(UpdateAppRequest request) {
return impl.update(request);
}
@@ -383,11 +332,7 @@ public AppPermissions updatePermissions(String appName) {
return updatePermissions(new AppPermissionsRequest().setAppName(appName));
}
- /**
- * Update app permissions.
- *
- * Updates the permissions on an app. Apps can inherit permissions from their root object.
- */
+ /** Updates the permissions on an app. Apps can inherit permissions from their root object. */
public AppPermissions updatePermissions(AppPermissionsRequest request) {
return impl.updatePermissions(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
index d5909455c..9e5b895bf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
@@ -13,104 +13,49 @@
*/
@Generated
public interface AppsService {
- /**
- * Create an app.
- *
- * Creates a new app.
- */
+ /** Creates a new app. */
App create(CreateAppRequest createAppRequest);
- /**
- * Delete an app.
- *
- * Deletes an app.
- */
+ /** Deletes an app. */
App delete(DeleteAppRequest deleteAppRequest);
- /**
- * Create an app deployment.
- *
- * Creates an app deployment for the app with the supplied name.
- */
+ /** Creates an app deployment for the app with the supplied name. */
AppDeployment deploy(CreateAppDeploymentRequest createAppDeploymentRequest);
- /**
- * Get an app.
- *
- * Retrieves information for the app with the supplied name.
- */
+ /** Retrieves information for the app with the supplied name. */
App get(GetAppRequest getAppRequest);
- /**
- * Get an app deployment.
- *
- * Retrieves information for the app deployment with the supplied name and deployment id.
- */
+ /** Retrieves information for the app deployment with the supplied name and deployment id. */
AppDeployment getDeployment(GetAppDeploymentRequest getAppDeploymentRequest);
- /**
- * Get app permission levels.
- *
- * Gets the permission levels that a user can have on an object.
- */
+ /** Gets the permission levels that a user can have on an object. */
GetAppPermissionLevelsResponse getPermissionLevels(
GetAppPermissionLevelsRequest getAppPermissionLevelsRequest);
- /**
- * Get app permissions.
- *
- * Gets the permissions of an app. Apps can inherit permissions from their root object.
- */
+ /** Gets the permissions of an app. Apps can inherit permissions from their root object. */
AppPermissions getPermissions(GetAppPermissionsRequest getAppPermissionsRequest);
- /**
- * List apps.
- *
- * Lists all apps in the workspace.
- */
+ /** Lists all apps in the workspace. */
ListAppsResponse list(ListAppsRequest listAppsRequest);
- /**
- * List app deployments.
- *
- * Lists all app deployments for the app with the supplied name.
- */
+ /** Lists all app deployments for the app with the supplied name. */
ListAppDeploymentsResponse listDeployments(ListAppDeploymentsRequest listAppDeploymentsRequest);
/**
- * Set app permissions.
- *
- * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
- * direct permissions if none are specified. Objects can inherit permissions from their root
- * object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
*/
AppPermissions setPermissions(AppPermissionsRequest appPermissionsRequest);
- /**
- * Start an app.
- *
- * Start the last active deployment of the app in the workspace.
- */
+ /** Start the last active deployment of the app in the workspace. */
App start(StartAppRequest startAppRequest);
- /**
- * Stop an app.
- *
- * Stops the active deployment of the app in the workspace.
- */
+ /** Stops the active deployment of the app in the workspace. */
App stop(StopAppRequest stopAppRequest);
- /**
- * Update an app.
- *
- * Updates the app with the supplied name.
- */
+ /** Updates the app with the supplied name. */
App update(UpdateAppRequest updateAppRequest);
- /**
- * Update app permissions.
- *
- * Updates the permissions on an app. Apps can inherit permissions from their root object.
- */
+ /** Updates the permissions on an app. Apps can inherit permissions from their root object. */
AppPermissions updatePermissions(AppPermissionsRequest appPermissionsRequest);
}
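For orientation, a hedged sketch of reading apps and their deployments through these methods. The apps() accessor on WorkspaceClient, App.getName(), AppDeployment.getDeploymentId(), and ListAppDeploymentsRequest.setAppName() are assumed names that mirror the request fields shown above.

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.apps.App;
    import com.databricks.sdk.service.apps.AppDeployment;
    import com.databricks.sdk.service.apps.ListAppDeploymentsRequest;
    import com.databricks.sdk.service.apps.ListAppsRequest;

    public class AppsUsageSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();

        for (App app : w.apps().list(new ListAppsRequest())) {
          System.out.println("app: " + app.getName());
          // All deployments recorded for this app.
          for (AppDeployment d :
              w.apps().listDeployments(new ListAppDeploymentsRequest().setAppName(app.getName()))) {
            System.out.println("  deployment: " + d.getDeploymentId());
          }
          // Permission levels the caller can hold on this app.
          System.out.println("  " + w.apps().getPermissionLevels(app.getName()));
        }
      }
    }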
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
index 1d0425673..06ac40723 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create an app deployment */
@Generated
public class CreateAppDeploymentRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java
index 0a2d2eb59..d0ba8acd5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java
@@ -9,7 +9,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create an app */
@Generated
public class CreateAppRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java
index 16958972e..d3cef172d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete an app */
@Generated
public class DeleteAppRequest {
/** The name of the app. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java
index 6f34447d1..7b3931c59 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get an app deployment */
@Generated
public class GetAppDeploymentRequest {
/** The name of the app. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java
index bb2651075..118333a02 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get app permission levels */
@Generated
public class GetAppPermissionLevelsRequest {
/** The app for which to get or manage permissions. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java
index bc44a9252..f0f41d68a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionsRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get app permissions */
@Generated
public class GetAppPermissionsRequest {
/** The app for which to get or manage permissions. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java
index ec758e4b7..1465d4829 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get an app */
@Generated
public class GetAppRequest {
/** The name of the app. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java
index 521799ca5..c6601522e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List app deployments */
@Generated
public class ListAppDeploymentsRequest {
/** The name of the app. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java
index be3a51190..e62e4e319 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List apps */
@Generated
public class ListAppsRequest {
/** Upper bound for items returned. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java
index 09baf63ac..c150f859a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Update an app */
@Generated
public class UpdateAppRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
index c5c8e08ab..2df43b356 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
@@ -31,9 +31,7 @@ public DownloadResponse download(String startMonth, String endMonth) {
}
/**
- * Return billable usage logs.
- *
- * Returns billable usage logs in CSV format for the specified account and date range. For the
+ * Returns billable usage logs in CSV format for the specified account and date range. For the
* data schema, see [CSV file schema]. Note that this method might take multiple minutes to
* complete.
*
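The two-string overload above is the whole download surface; a minimal sketch of saving one quarter of usage to disk follows. The billableUsage() accessor on AccountClient and the getContents() stream on DownloadResponse are assumed accessor names.

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.billing.DownloadResponse;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;

    public class BillableUsageSketch {
      public static void main(String[] args) throws Exception {
        AccountClient a = new AccountClient(); // account-level auth from the environment

        // CSV export for January through March 2024; the call itself may take minutes.
        DownloadResponse resp = a.billableUsage().download("2024-01", "2024-03");
        try (InputStream csv = resp.getContents()) {
          Files.copy(csv, Paths.get("billable-usage.csv"), StandardCopyOption.REPLACE_EXISTING);
        }
      }
    }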
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java
index c4726ccf2..1309f5914 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java
@@ -14,9 +14,7 @@
@Generated
public interface BillableUsageService {
/**
- * Return billable usage logs.
- *
- * Returns billable usage logs in CSV format for the specified account and date range. For the
+ * Returns billable usage logs in CSV format for the specified account and date range. For the
* data schema, see [CSV file schema]. Note that this method might take multiple minutes to
* complete.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
index 39b840571..3ab0a31bc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
@@ -24,11 +24,7 @@ public BudgetPolicyAPI(BudgetPolicyService mock) {
impl = mock;
}
- /**
- * Create a budget policy.
- *
- * Creates a new policy.
- */
+ /** Creates a new policy. */
public BudgetPolicy create(CreateBudgetPolicyRequest request) {
return impl.create(request);
}
@@ -37,11 +33,7 @@ public void delete(String policyId) {
delete(new DeleteBudgetPolicyRequest().setPolicyId(policyId));
}
- /**
- * Delete a budget policy.
- *
- * Deletes a policy
- */
+ /** Deletes a policy */
public void delete(DeleteBudgetPolicyRequest request) {
impl.delete(request);
}
@@ -50,20 +42,13 @@ public BudgetPolicy get(String policyId) {
return get(new GetBudgetPolicyRequest().setPolicyId(policyId));
}
- /**
- * Get a budget policy.
- *
- * Retrieves a policy by it's ID.
- */
+ /** Retrieves a policy by its ID. */
public BudgetPolicy get(GetBudgetPolicyRequest request) {
return impl.get(request);
}
/**
- * List policies.
- *
- * Lists all policies. Policies are returned in the alphabetically ascending order of their
- * names.
+ * Lists all policies. Policies are returned in the alphabetically ascending order of their names.
*/
public Iterable<BudgetPolicy> list(ListBudgetPoliciesRequest request) {
- /**
- * Update a budget policy.
- *
- * Updates a policy
- */
+ /** Updates a policy */
public BudgetPolicy update(UpdateBudgetPolicyRequest request) {
return impl.update(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyService.java
index a2ff724d4..2fb7b4b81 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyService.java
@@ -12,39 +12,20 @@
*/
@Generated
public interface BudgetPolicyService {
- /**
- * Create a budget policy.
- *
- * Creates a new policy.
- */
+ /** Creates a new policy. */
BudgetPolicy create(CreateBudgetPolicyRequest createBudgetPolicyRequest);
- /**
- * Delete a budget policy.
- *
- * Deletes a policy
- */
+ /** Deletes a policy */
void delete(DeleteBudgetPolicyRequest deleteBudgetPolicyRequest);
- /**
- * Get a budget policy.
- *
- * Retrieves a policy by it's ID.
- */
+ /** Retrieves a policy by its ID. */
BudgetPolicy get(GetBudgetPolicyRequest getBudgetPolicyRequest);
/**
- * List policies.
- *
- * Lists all policies. Policies are returned in the alphabetically ascending order of their
- * names.
+ * Lists all policies. Policies are returned in the alphabetically ascending order of their names.
*/
ListBudgetPoliciesResponse list(ListBudgetPoliciesRequest listBudgetPoliciesRequest);
- /**
- * Update a budget policy.
- *
- * Updates a policy
- */
+ /** Updates a policy */
BudgetPolicy update(UpdateBudgetPolicyRequest updateBudgetPolicyRequest);
}
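A small sketch against the trimmed-down interface above. The budgetPolicy() accessor on AccountClient and the getPolicyId()/getPolicyName() getters on BudgetPolicy are assumed; the single-argument get and delete overloads are the ones shown in BudgetPolicyAPI.

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.billing.BudgetPolicy;
    import com.databricks.sdk.service.billing.ListBudgetPoliciesRequest;

    public class BudgetPolicySketch {
      public static void main(String[] args) {
        AccountClient a = new AccountClient();

        // Policies are returned in ascending name order.
        for (BudgetPolicy policy : a.budgetPolicy().list(new ListBudgetPoliciesRequest())) {
          System.out.println(policy.getPolicyId() + " " + policy.getPolicyName());
        }

        BudgetPolicy one = a.budgetPolicy().get("1234-example-policy-id");
        a.budgetPolicy().delete(one.getPolicyId());
      }
    }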
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
index 5df93d61d..f45659d44 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
@@ -33,9 +33,7 @@ public CreateBudgetConfigurationResponse create(CreateBudgetConfigurationBudget
}
/**
- * Create new budget.
- *
- * Create a new budget configuration for an account. For full details, see
+ * Create a new budget configuration for an account. For full details, see
* https://docs.databricks.com/en/admin/account-settings/budgets.html.
*/
public CreateBudgetConfigurationResponse create(CreateBudgetConfigurationRequest request) {
@@ -47,9 +45,7 @@ public void delete(String budgetId) {
}
/**
- * Delete budget.
- *
- * Deletes a budget configuration for an account. Both account and budget configuration are
+ * Deletes a budget configuration for an account. Both account and budget configuration are
* specified by ID. This cannot be undone.
*/
public void delete(DeleteBudgetConfigurationRequest request) {
@@ -61,20 +57,14 @@ public GetBudgetConfigurationResponse get(String budgetId) {
}
/**
- * Get budget.
- *
- * Gets a budget configuration for an account. Both account and budget configuration are
- * specified by ID.
+ * Gets a budget configuration for an account. Both account and budget configuration are specified
+ * by ID.
*/
public GetBudgetConfigurationResponse get(GetBudgetConfigurationRequest request) {
return impl.get(request);
}
- /**
- * Get all budgets.
- *
- * Gets all budgets associated with this account.
- */
+ /** Gets all budgets associated with this account. */
public Iterable<BudgetConfiguration> list(ListBudgetConfigurationsRequest request) {
/**
- * Modify budget.
- *
- * Updates a budget configuration for an account. Both account and budget configuration are
+ * Updates a budget configuration for an account. Both account and budget configuration are
* specified by ID.
*/
public UpdateBudgetConfigurationResponse update(UpdateBudgetConfigurationRequest request) {
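And the same shape for budget configurations, with the budgets() accessor on AccountClient and getBudgetConfigurationId() on the returned model taken as assumptions:

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.billing.BudgetConfiguration;
    import com.databricks.sdk.service.billing.ListBudgetConfigurationsRequest;

    public class BudgetsSketch {
      public static void main(String[] args) {
        AccountClient a = new AccountClient();

        for (BudgetConfiguration b : a.budgets().list(new ListBudgetConfigurationsRequest())) {
          System.out.println(b.getBudgetConfigurationId());
        }

        // The String overloads shown above take the budget configuration ID directly.
        System.out.println(a.budgets().get("budget-configuration-id"));
        a.budgets().delete("budget-configuration-id");
      }
    }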
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsService.java
index 468e0fc84..57acf86d2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsService.java
@@ -15,42 +15,30 @@
@Generated
public interface BudgetsService {
/**
- * Create new budget.
- *
- * Create a new budget configuration for an account. For full details, see
+ * Create a new budget configuration for an account. For full details, see
* https://docs.databricks.com/en/admin/account-settings/budgets.html.
*/
CreateBudgetConfigurationResponse create(
CreateBudgetConfigurationRequest createBudgetConfigurationRequest);
/**
- * Delete budget.
- *
- * Deletes a budget configuration for an account. Both account and budget configuration are
+ * Deletes a budget configuration for an account. Both account and budget configuration are
* specified by ID. This cannot be undone.
*/
void delete(DeleteBudgetConfigurationRequest deleteBudgetConfigurationRequest);
/**
- * Get budget.
- *
- * Gets a budget configuration for an account. Both account and budget configuration are
- * specified by ID.
+ * Gets a budget configuration for an account. Both account and budget configuration are specified
+ * by ID.
*/
GetBudgetConfigurationResponse get(GetBudgetConfigurationRequest getBudgetConfigurationRequest);
- /**
- * Get all budgets.
- *
- * Gets all budgets associated with this account.
- */
+ /** Gets all budgets associated with this account. */
ListBudgetConfigurationsResponse list(
ListBudgetConfigurationsRequest listBudgetConfigurationsRequest);
/**
- * Modify budget.
- *
- * Updates a budget configuration for an account. Both account and budget configuration are
+ * Updates a budget configuration for an account. Both account and budget configuration are
* specified by ID.
*/
UpdateBudgetConfigurationResponse update(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java
index 61cac47b8..8ba08d8c9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete budget */
@Generated
public class DeleteBudgetConfigurationRequest {
/** The Databricks budget configuration ID. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java
index d5aabfb58..c24c6a080 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a budget policy */
@Generated
public class DeleteBudgetPolicyRequest {
/** The Id of the policy. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java
index 1db3a94b9..bbf8fa046 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Return billable usage logs */
@Generated
public class DownloadRequest {
/** Format: `YYYY-MM`. Last month to return billable usage logs for. This field is required. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequest.java
index e51a9a075..67b8f9d66 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingUsageDashboardRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get usage dashboard */
@Generated
public class GetBillingUsageDashboardRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java
index 6e34027da..0773e9b5e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get budget */
@Generated
public class GetBudgetConfigurationRequest {
/** The budget configuration ID */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java
index 12c33b5cd..b85c555b9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a budget policy */
@Generated
public class GetBudgetPolicyRequest {
/** The Id of the policy. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java
index 2f05db001..b5f8b134f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get log delivery configuration */
@Generated
public class GetLogDeliveryRequest {
/** The log delivery configuration id of customer */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java
index 166d36749..2807a6956 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get all budgets */
@Generated
public class ListBudgetConfigurationsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java
index bfe3035ac..8871004fb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List policies */
@Generated
public class ListBudgetPoliciesRequest {
/** A filter to apply to the list of policies. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java
index fedbbd71c..e682a4d86 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get all log delivery configurations */
@Generated
public class ListLogDeliveryRequest {
/** The Credentials id to filter the search results with */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
index bab132ce3..a9e33b5f1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
@@ -8,8 +8,63 @@
import org.slf4j.LoggerFactory;
/**
- * These APIs manage Log delivery configurations for this account. Log delivery configs enable you
- * to configure the delivery of the specified type of logs to your storage account.
+ * These APIs manage log delivery configurations for this account. The two supported log types for
+ * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
+ * feature works with all account ID types.
+ *
+ * Log delivery works with all account types. However, if your account is on the E2 version of
+ * the platform or on a select custom plan that allows multiple workspaces per account, you can
+ * optionally configure different storage destinations for each workspace. Log delivery status is
+ * also provided to know the latest status of log delivery attempts.
+ *
+ * The high-level flow of billable usage delivery:
+ *
+ * 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
+ * Using Databricks APIs, call the Account API to create a [storage configuration
+ * object](:method:Storage/Create) that uses the bucket name.
+ *
+ * 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For full details,
+ * including the required IAM role policies and trust relationship, see [Billable usage log
+ * delivery]. Using Databricks APIs, call the Account API to create a [credential configuration
+ * object](:method:Credentials/Create) that uses the IAM role's ARN.
+ *
+ * 3. **Create log delivery configuration**: Using Databricks APIs, call the Account API to
+ * [create a log delivery configuration](:method:LogDelivery/Create) that uses the credential and
+ * storage configuration objects from previous steps. You can specify if the logs should include all
+ * events of that log type in your account (_Account level_ delivery) or only events for a specific
+ * set of workspaces (_workspace level_ delivery). Account level log delivery applies to all current
+ * and future workspaces plus account level logs, while workspace level log delivery solely delivers
+ * logs related to the specified workspaces. You can create multiple types of delivery
+ * configurations per account.
+ *
+ * For billable usage delivery: * For more information about billable usage logs, see [Billable
+ * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
+ * ` For audit log delivery: * For more information about audit log delivery, see [Audit log
+ * delivery], which includes information about the used JSON schema. * The delivery location is
+ * ` [Audit log delivery]:
+ * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage
+ * log delivery]:
+ * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
+ * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
+ * [create a new AWS S3 bucket]:
+ * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*/
@Generated
public class LogDeliveryAPI {
@@ -35,10 +90,8 @@ public WrappedLogDeliveryConfiguration create(
}
/**
- * Create a new log delivery configuration.
- *
- * Creates a new Databricks log delivery configuration to enable delivery of the specified type
- * of logs to your storage location. This requires that you already created a [credential
+ * Creates a new Databricks log delivery configuration to enable delivery of the specified type of
+ * logs to your storage location. This requires that you already created a [credential
* object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a
* [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).
*
@@ -70,20 +123,12 @@ public GetLogDeliveryConfigurationResponse get(String logDeliveryConfigurationId
new GetLogDeliveryRequest().setLogDeliveryConfigurationId(logDeliveryConfigurationId));
}
- /**
- * Get log delivery configuration.
- *
- * Gets a Databricks log delivery configuration object for an account, both specified by ID.
- */
+ /** Gets a Databricks log delivery configuration object for an account, both specified by ID. */
public GetLogDeliveryConfigurationResponse get(GetLogDeliveryRequest request) {
return impl.get(request);
}
- /**
- * Get all log delivery configurations.
- *
- * Gets all Databricks log delivery configurations associated with an account specified by ID.
- */
+ /** Gets all Databricks log delivery configurations associated with an account specified by ID. */
public Iterable<LogDeliveryConfiguration> list(ListLogDeliveryRequest request) {
/**
- * Enable or disable log delivery configuration.
- *
- * Enables or disables a log delivery configuration. Deletion of delivery configurations is not
+ * Enables or disables a log delivery configuration. Deletion of delivery configurations is not
+ * Enables or disables a log delivery configuration. Deletion of delivery configurations is not
* supported, so disable log delivery configurations that are no longer needed. Note that you
* can't re-enable a delivery configuration if this would violate the delivery configuration
* limits described under [Create log delivery](:method:LogDelivery/Create).
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
index 8e66ac799..9c4796014 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
@@ -4,8 +4,63 @@
import com.databricks.sdk.support.Generated;
/**
- * These APIs manage Log delivery configurations for this account. Log delivery configs enable you
- * to configure the delivery of the specified type of logs to your storage account.
+ * These APIs manage log delivery configurations for this account. The two supported log types for
+ * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
+ * feature works with all account ID types.
+ *
+ * Log delivery works with all account types. However, if your account is on the E2 version of
+ * the platform or on a select custom plan that allows multiple workspaces per account, you can
+ * optionally configure different storage destinations for each workspace. Log delivery status is
+ * also provided to know the latest status of log delivery attempts.
+ *
+ * The high-level flow of billable usage delivery:
+ *
+ * 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
+ * Using Databricks APIs, call the Account API to create a [storage configuration
+ * object](:method:Storage/Create) that uses the bucket name.
+ *
+ * 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For full details,
+ * including the required IAM role policies and trust relationship, see [Billable usage log
+ * delivery]. Using Databricks APIs, call the Account API to create a [credential configuration
+ * object](:method:Credentials/Create) that uses the IAM role's ARN.
+ *
+ * 3. **Create log delivery configuration**: Using Databricks APIs, call the Account API to
+ * [create a log delivery configuration](:method:LogDelivery/Create) that uses the credential and
+ * storage configuration objects from previous steps. You can specify if the logs should include all
+ * events of that log type in your account (_Account level_ delivery) or only events for a specific
+ * set of workspaces (_workspace level_ delivery). Account level log delivery applies to all current
+ * and future workspaces plus account level logs, while workspace level log delivery solely delivers
+ * logs related to the specified workspaces. You can create multiple types of delivery
+ * configurations per account.
+ *
+ * For billable usage delivery: * For more information about billable usage logs, see [Billable
+ * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
+ * ` For audit log delivery: * For more information about audit log delivery, see [Audit log
+ * delivery], which includes information about the used JSON schema. * The delivery location is
+ * ` [Audit log delivery]:
+ * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage
+ * log delivery]:
+ * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
+ * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
+ * [create a new AWS S3 bucket]:
+ * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*
* This is the high-level interface, that contains generated methods.
*
@@ -14,10 +69,8 @@
@Generated
public interface LogDeliveryService {
/**
- * Create a new log delivery configuration.
- *
- * Creates a new Databricks log delivery configuration to enable delivery of the specified type
- * of logs to your storage location. This requires that you already created a [credential
+ * Creates a new Databricks log delivery configuration to enable delivery of the specified type of
+ * logs to your storage location. This requires that you already created a [credential
* object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a
* [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).
*
@@ -43,24 +96,14 @@ public interface LogDeliveryService {
WrappedLogDeliveryConfiguration create(
WrappedCreateLogDeliveryConfiguration wrappedCreateLogDeliveryConfiguration);
- /**
- * Get log delivery configuration.
- *
- * Gets a Databricks log delivery configuration object for an account, both specified by ID.
- */
+ /** Gets a Databricks log delivery configuration object for an account, both specified by ID. */
GetLogDeliveryConfigurationResponse get(GetLogDeliveryRequest getLogDeliveryRequest);
- /**
- * Get all log delivery configurations.
- *
- * Gets all Databricks log delivery configurations associated with an account specified by ID.
- */
+ /** Gets all Databricks log delivery configurations associated with an account specified by ID. */
WrappedLogDeliveryConfigurations list(ListLogDeliveryRequest listLogDeliveryRequest);
/**
- * Enable or disable log delivery configuration.
- *
- * Enables or disables a log delivery configuration. Deletion of delivery configurations is not
+ * Enables or disables a log delivery configuration. Deletion of delivery configurations is not
* supported, so disable log delivery configurations that are no longer needed. Note that you
* can't re-enable a delivery configuration if this would violate the delivery configuration
* limits described under [Create log delivery](:method:LogDelivery/Create).
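The expanded class comment spells out a three-step flow: create a storage configuration, create a credential configuration, then create the log delivery configuration that ties them together. Below is a rough sketch of the final step, assuming the first two objects already exist; the CreateLogDeliveryConfigurationParams class, its setters, and the LogType/OutputFormat enums are assumptions inferred from the REST payload, not signatures confirmed by this diff (only WrappedCreateLogDeliveryConfiguration and the create method are shown above).

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.billing.CreateLogDeliveryConfigurationParams;
    import com.databricks.sdk.service.billing.LogType;
    import com.databricks.sdk.service.billing.OutputFormat;
    import com.databricks.sdk.service.billing.WrappedCreateLogDeliveryConfiguration;
    import com.databricks.sdk.service.billing.WrappedLogDeliveryConfiguration;

    public class LogDeliverySketch {
      public static void main(String[] args) {
        AccountClient a = new AccountClient();

        // Step 3: wire an existing credential configuration and storage configuration
        // into an account-level audit log delivery configuration.
        WrappedLogDeliveryConfiguration created =
            a.logDelivery()
                .create(
                    new WrappedCreateLogDeliveryConfiguration()
                        .setLogDeliveryConfiguration(
                            new CreateLogDeliveryConfigurationParams()
                                .setConfigName("audit-logs-to-s3")        // assumed setter
                                .setLogType(LogType.AUDIT_LOGS)           // assumed enum
                                .setOutputFormat(OutputFormat.JSON)       // assumed enum
                                .setCredentialsId("credential-configuration-id")
                                .setStorageConfigurationId("storage-configuration-id")));
        System.out.println(created);
      }
    }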
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java
index d7216e802..42ae51679 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java
@@ -9,7 +9,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Update a budget policy */
@Generated
public class UpdateBudgetPolicyRequest {
/** DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java
index 93573bbda..72dca2cf8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java
@@ -27,20 +27,12 @@ public UsageDashboardsAPI(UsageDashboardsService mock) {
impl = mock;
}
- /**
- * Create new usage dashboard.
- *
- * Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
- */
+ /** Create a usage dashboard specified by workspaceId, accountId, and dashboard type. */
public CreateBillingUsageDashboardResponse create(CreateBillingUsageDashboardRequest request) {
return impl.create(request);
}
- /**
- * Get usage dashboard.
- *
- * Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
- */
+ /** Get a usage dashboard specified by workspaceId, accountId, and dashboard type. */
public GetBillingUsageDashboardResponse get(GetBillingUsageDashboardRequest request) {
return impl.get(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsService.java
index 47bcb4b94..832cfbc1a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsService.java
@@ -14,19 +14,11 @@
*/
@Generated
public interface UsageDashboardsService {
- /**
- * Create new usage dashboard.
- *
- * Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
- */
+ /** Create a usage dashboard specified by workspaceId, accountId, and dashboard type. */
CreateBillingUsageDashboardResponse create(
CreateBillingUsageDashboardRequest createBillingUsageDashboardRequest);
- /**
- * Get usage dashboard.
- *
- * Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
- */
+ /** Get a usage dashboard specified by workspaceId, accountId, and dashboard type. */
GetBillingUsageDashboardResponse get(
GetBillingUsageDashboardRequest getBillingUsageDashboardRequest);
}
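A tiny sketch for the two methods above; usageDashboards() on AccountClient and the setWorkspaceId setter on the request classes are assumed names.

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.billing.CreateBillingUsageDashboardRequest;
    import com.databricks.sdk.service.billing.GetBillingUsageDashboardRequest;

    public class UsageDashboardSketch {
      public static void main(String[] args) {
        AccountClient a = new AccountClient();
        long workspaceId = 1234567890L;

        // Create the usage dashboard for one workspace, then fetch it back.
        System.out.println(
            a.usageDashboards().create(new CreateBillingUsageDashboardRequest().setWorkspaceId(workspaceId)));
        System.out.println(
            a.usageDashboards().get(new GetBillingUsageDashboardRequest().setWorkspaceId(workspaceId)));
      }
    }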
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
index 68a2c8e08..1e898aaad 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
@@ -31,11 +31,7 @@ public void create(long workspaceId, String metastoreId) {
.setMetastoreId(metastoreId));
}
- /**
- * Assigns a workspace to a metastore.
- *
- * Creates an assignment to a metastore for a workspace
- */
+ /** Creates an assignment to a metastore for a workspace */
public void create(AccountsCreateMetastoreAssignment request) {
impl.create(request);
}
@@ -47,11 +43,7 @@ public void delete(long workspaceId, String metastoreId) {
.setMetastoreId(metastoreId));
}
- /**
- * Delete a metastore assignment.
- *
- * Deletes a metastore assignment to a workspace, leaving the workspace with no metastore.
- */
+ /** Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. */
public void delete(DeleteAccountMetastoreAssignmentRequest request) {
impl.delete(request);
}
@@ -61,10 +53,8 @@ public AccountsMetastoreAssignment get(long workspaceId) {
}
/**
- * Gets the metastore assignment for a workspace.
- *
- * Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace
- * is assigned a metastore, the mappig will be returned. If no metastore is assigned to the
+ * Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is
+ * assigned a metastore, the mapping will be returned. If no metastore is assigned to the
* workspace, the assignment will not be found and a 404 returned.
*/
public AccountsMetastoreAssignment get(GetAccountMetastoreAssignmentRequest request) {
@@ -75,11 +65,7 @@
- /**
- * Get all workspaces assigned to a metastore.
- *
- * Gets a list of all Databricks workspace IDs that have been assigned to given metastore.
- */
+ /** Gets a list of all Databricks workspace IDs that have been assigned to the given metastore. */
public Iterable<Long> list(ListAccountMetastoreAssignmentsRequest request) {
/**
- * Updates a metastore assignment to a workspaces.
- *
- * Updates an assignment to a metastore for a workspace. Currently, only the default catalog
- * may be updated.
+ * Updates an assignment to a metastore for a workspace. Currently, only the default catalog may
+ * be updated.
*/
public void update(AccountsUpdateMetastoreAssignment request) {
impl.update(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java
index f2fe33478..e1b89bd09 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java
@@ -12,43 +12,27 @@
*/
@Generated
public interface AccountMetastoreAssignmentsService {
- /**
- * Assigns a workspace to a metastore.
- *
- * Creates an assignment to a metastore for a workspace
- */
+ /** Creates an assignment to a metastore for a workspace */
void create(AccountsCreateMetastoreAssignment accountsCreateMetastoreAssignment);
- /**
- * Delete a metastore assignment.
- *
- * Deletes a metastore assignment to a workspace, leaving the workspace with no metastore.
- */
+ /** Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. */
void delete(DeleteAccountMetastoreAssignmentRequest deleteAccountMetastoreAssignmentRequest);
/**
- * Gets the metastore assignment for a workspace.
- *
- * Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace
- * is assigned a metastore, the mappig will be returned. If no metastore is assigned to the
+ * Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is
+ * assigned a metastore, the mapping will be returned. If no metastore is assigned to the
* workspace, the assignment will not be found and a 404 returned.
*/
AccountsMetastoreAssignment get(
GetAccountMetastoreAssignmentRequest getAccountMetastoreAssignmentRequest);
- /**
- * Get all workspaces assigned to a metastore.
- *
- * Gets a list of all Databricks workspace IDs that have been assigned to given metastore.
- */
+ /** Gets a list of all Databricks workspace IDs that have been assigned to given metastore. */
ListAccountMetastoreAssignmentsResponse list(
ListAccountMetastoreAssignmentsRequest listAccountMetastoreAssignmentsRequest);
/**
- * Updates a metastore assignment to a workspaces.
- *
- * Updates an assignment to a metastore for a workspace. Currently, only the default catalog
- * may be updated.
+ * Updates an assignment to a metastore for a workspace. Currently, only the default catalog may
+ * be updated.
*/
void update(AccountsUpdateMetastoreAssignment accountsUpdateMetastoreAssignment);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
index 3ed7e76b4..0f95ce229 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
@@ -27,11 +27,7 @@ public AccountMetastoresAPI(AccountMetastoresService mock) {
impl = mock;
}
- /**
- * Create metastore.
- *
- * Creates a Unity Catalog metastore.
- */
+ /** Creates a Unity Catalog metastore. */
public AccountsMetastoreInfo create(AccountsCreateMetastore request) {
return impl.create(request);
}
@@ -40,11 +36,7 @@ public void delete(String metastoreId) {
delete(new DeleteAccountMetastoreRequest().setMetastoreId(metastoreId));
}
- /**
- * Delete a metastore.
- *
- * Deletes a Unity Catalog metastore for an account, both specified by ID.
- */
+ /** Deletes a Unity Catalog metastore for an account, both specified by ID. */
public void delete(DeleteAccountMetastoreRequest request) {
impl.delete(request);
}
@@ -53,20 +45,12 @@ public AccountsMetastoreInfo get(String metastoreId) {
return get(new GetAccountMetastoreRequest().setMetastoreId(metastoreId));
}
- /**
- * Get a metastore.
- *
- * Gets a Unity Catalog metastore from an account, both specified by ID.
- */
+ /** Gets a Unity Catalog metastore from an account, both specified by ID. */
public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) {
return impl.get(request);
}
- /**
- * Get all metastores associated with an account.
- *
- * Gets all Unity Catalog metastores associated with an account specified by ID.
- */
+ /** Gets all Unity Catalog metastores associated with an account specified by ID. */
public Iterable<AccountsMetastoreInfo> list() {
- /**
- * Update a metastore.
- *
- * Updates an existing Unity Catalog metastore.
- */
+ /** Updates an existing Unity Catalog metastore. */
public AccountsMetastoreInfo update(AccountsUpdateMetastore request) {
return impl.update(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java
index e1c20cb94..07e71499f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresService.java
@@ -13,38 +13,18 @@
*/
@Generated
public interface AccountMetastoresService {
- /**
- * Create metastore.
- *
- * Creates a Unity Catalog metastore.
- */
+ /** Creates a Unity Catalog metastore. */
AccountsMetastoreInfo create(AccountsCreateMetastore accountsCreateMetastore);
- /**
- * Delete a metastore.
- *
- * Deletes a Unity Catalog metastore for an account, both specified by ID.
- */
+ /** Deletes a Unity Catalog metastore for an account, both specified by ID. */
void delete(DeleteAccountMetastoreRequest deleteAccountMetastoreRequest);
- /**
- * Get a metastore.
- *
- * Gets a Unity Catalog metastore from an account, both specified by ID.
- */
+ /** Gets a Unity Catalog metastore from an account, both specified by ID. */
AccountsMetastoreInfo get(GetAccountMetastoreRequest getAccountMetastoreRequest);
- /**
- * Get all metastores associated with an account.
- *
- * Gets all Unity Catalog metastores associated with an account specified by ID.
- */
+ /** Gets all Unity Catalog metastores associated with an account specified by ID. */
ListMetastoresResponse list();
- /**
- * Update a metastore.
- *
- * Updates an existing Unity Catalog metastore.
- */
+ /** Updates an existing Unity Catalog metastore. */
AccountsMetastoreInfo update(AccountsUpdateMetastore accountsUpdateMetastore);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
index e8b5156f6..4b03ef8e0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
@@ -29,9 +29,7 @@ public AccountsStorageCredentialInfo create(String metastoreId) {
}
/**
- * Create a storage credential.
- *
- * Creates a new storage credential. The request object is specific to the cloud:
+ * Creates a new storage credential. The request object is specific to the cloud:
*
* * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
* **GcpServiceAcountKey** for GCP credentials.
@@ -51,9 +49,7 @@ public void delete(String metastoreId, String storageCredentialName) {
}
/**
- * Delete a storage credential.
- *
- * Deletes a storage credential from the metastore. The caller must be an owner of the storage
+ * Deletes a storage credential from the metastore. The caller must be an owner of the storage
* credential.
*/
public void delete(DeleteAccountStorageCredentialRequest request) {
@@ -68,10 +64,8 @@ public AccountsStorageCredentialInfo get(String metastoreId, String storageCrede
}
/**
- * Gets the named storage credential.
- *
- * Gets a storage credential from the metastore. The caller must be a metastore admin, the
- * owner of the storage credential, or have a level of privilege on the storage credential.
+ * Gets a storage credential from the metastore. The caller must be a metastore admin, the owner
+ * of the storage credential, or have a level of privilege on the storage credential.
*/
public AccountsStorageCredentialInfo get(GetAccountStorageCredentialRequest request) {
return impl.get(request);
@@ -81,11 +75,7 @@
- /**
- * Get all storage credentials assigned to a metastore.
- *
- * Gets a list of all storage credentials that have been assigned to given metastore.
- */
+ /** Gets a list of all storage credentials that have been assigned to given metastore. */
/**
- * Updates a storage credential.
- *
- * Updates a storage credential on the metastore. The caller must be the owner of the storage
+ * Updates a storage credential on the metastore. The caller must be the owner of the storage
* credential. If the caller is a metastore admin, only the __owner__ credential can be changed.
*/
public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java
index 94384c354..2cceee11c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsService.java
@@ -13,9 +13,7 @@
@Generated
public interface AccountStorageCredentialsService {
/**
- * Create a storage credential.
- *
- * Creates a new storage credential. The request object is specific to the cloud:
+ * Creates a new storage credential. The request object is specific to the cloud:
*
* * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
* **GcpServiceAcountKey** for GCP credentials.
@@ -27,34 +25,24 @@ AccountsStorageCredentialInfo create(
AccountsCreateStorageCredential accountsCreateStorageCredential);
/**
- * Delete a storage credential.
- *
- * Deletes a storage credential from the metastore. The caller must be an owner of the storage
+ * Deletes a storage credential from the metastore. The caller must be an owner of the storage
* credential.
*/
void delete(DeleteAccountStorageCredentialRequest deleteAccountStorageCredentialRequest);
/**
- * Gets the named storage credential.
- *
- * Gets a storage credential from the metastore. The caller must be a metastore admin, the
- * owner of the storage credential, or have a level of privilege on the storage credential.
+ * Gets a storage credential from the metastore. The caller must be a metastore admin, the owner
+ * of the storage credential, or have a level of privilege on the storage credential.
*/
AccountsStorageCredentialInfo get(
GetAccountStorageCredentialRequest getAccountStorageCredentialRequest);
- /**
- * Get all storage credentials assigned to a metastore.
- *
- * Gets a list of all storage credentials that have been assigned to given metastore.
- */
+ /** Gets a list of all storage credentials that have been assigned to given metastore. */
ListAccountStorageCredentialsResponse list(
ListAccountStorageCredentialsRequest listAccountStorageCredentialsRequest);
/**
- * Updates a storage credential.
- *
- * Updates a storage credential on the metastore. The caller must be the owner of the storage
+ * Updates a storage credential on the metastore. The caller must be the owner of the storage
* credential. If the caller is a metastore admin, only the __owner__ credential can be changed.
*/
AccountsStorageCredentialInfo update(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java
index e26fe5946..b33143299 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsAPI.java
@@ -32,10 +32,8 @@ public ArtifactAllowlistInfo get(ArtifactType artifactType) {
}
/**
- * Get an artifact allowlist.
- *
- * Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin
- * or have the **MANAGE ALLOWLIST** privilege on the metastore.
+ * Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or
+ * have the **MANAGE ALLOWLIST** privilege on the metastore.
*/
public ArtifactAllowlistInfo get(GetArtifactAllowlistRequest request) {
return impl.get(request);
@@ -50,11 +48,9 @@ public ArtifactAllowlistInfo update(
}
/**
- * Set an artifact allowlist.
- *
- * Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is
- * replaced with the new allowlist. The caller must be a metastore admin or have the **MANAGE
- * ALLOWLIST** privilege on the metastore.
+ * Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced
+ * with the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST**
+ * privilege on the metastore.
*/
public ArtifactAllowlistInfo update(SetArtifactAllowlist request) {
return impl.update(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java
index 2a56d3dd4..fc6d248df 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java
@@ -14,19 +14,15 @@
@Generated
public interface ArtifactAllowlistsService {
/**
- * Get an artifact allowlist.
- *
- * Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin
- * or have the **MANAGE ALLOWLIST** privilege on the metastore.
+ * Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or
+ * have the **MANAGE ALLOWLIST** privilege on the metastore.
*/
ArtifactAllowlistInfo get(GetArtifactAllowlistRequest getArtifactAllowlistRequest);
/**
- * Set an artifact allowlist.
- *
- * Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is
- * replaced with the new allowlist. The caller must be a metastore admin or have the **MANAGE
- * ALLOWLIST** privilege on the metastore.
+ * Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced
+ * with the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST**
+ * privilege on the metastore.
*/
ArtifactAllowlistInfo update(SetArtifactAllowlist setArtifactAllowlist);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java
index 2eac19555..89e8914cf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java
@@ -7,9 +7,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The AWS IAM role configuration */
@Generated
public class AwsIamRoleRequest {
- /** The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access. */
+ /** The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials. */
@JsonProperty("role_arn")
private String roleArn;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java
index 50420d04a..1d7118f96 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java
@@ -7,13 +7,14 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The AWS IAM role configuration */
@Generated
public class AwsIamRoleResponse {
- /** The external ID used in role assumption to prevent confused deputy problem.. */
+ /** The external ID used in role assumption to prevent the confused deputy problem. */
@JsonProperty("external_id")
private String externalId;
- /** The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access. */
+ /** The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials. */
@JsonProperty("role_arn")
private String roleArn;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java
index be3997823..4cfcd3853 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java
@@ -17,11 +17,7 @@ public class AzureManagedIdentity {
@JsonProperty("access_connector_id")
private String accessConnectorId;
- /**
- * The Databricks internal ID that represents this managed identity. This field is only used to
- * persist the credential_id once it is fetched from the credentials manager - as we only use the
- * protobuf serializer to store credentials, this ID gets persisted to the database. .
- */
+ /** The Databricks internal ID that represents this managed identity. */
@JsonProperty("credential_id")
private String credentialId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java
index 36122a638..ea5519ae0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java
@@ -7,21 +7,22 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The Azure managed identity configuration. */
@Generated
public class AzureManagedIdentityRequest {
/**
* The Azure resource ID of the Azure Databricks Access Connector. Use the format
- * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.
+ * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`.
*/
@JsonProperty("access_connector_id")
private String accessConnectorId;
/**
- * The Azure resource ID of the managed identity. Use the format
- * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}.
+ * The Azure resource ID of the managed identity. Use the format
+ * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}`.
* This is only available for user-assgined identities. For system-assigned identities, the
* access_connector_id is used to identify the identity. If this field is not provided, then we
- * assume the AzureManagedIdentity is for a system-assigned identity.
+ * assume the AzureManagedIdentity is using the system-assigned identity.
*/
@JsonProperty("managed_identity_id")
private String managedIdentityId;
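A minimal sketch of passing the resource-ID formats documented above through this request object; the fluent setter names are assumed from the field names, and both resource IDs are placeholders:

```java
import com.databricks.sdk.service.catalog.AzureManagedIdentityRequest;

public class AzureManagedIdentityRequestSketch {
  public static void main(String[] args) {
    // Placeholder resource IDs in the formats described in the Javadoc above.
    // For a system-assigned identity, managed_identity_id may be left unset.
    AzureManagedIdentityRequest identity =
        new AzureManagedIdentityRequest()
            .setAccessConnectorId(
                "/subscriptions/0000/resourceGroups/my-rg/providers/Microsoft.Databricks/accessConnectors/my-connector")
            .setManagedIdentityId(
                "/subscriptions/0000/resourceGroups/my-rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/my-identity");
    System.out.println(identity);
  }
}
```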
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java
index 91fbf7d9c..21a8c5c51 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java
@@ -7,11 +7,12 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** The Azure managed identity configuration. */
@Generated
public class AzureManagedIdentityResponse {
/**
* The Azure resource ID of the Azure Databricks Access Connector. Use the format
- * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.
+ * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`.
*/
@JsonProperty("access_connector_id")
private String accessConnectorId;
@@ -21,11 +22,11 @@ public class AzureManagedIdentityResponse {
private String credentialId;
/**
- * The Azure resource ID of the managed identity. Use the format
- * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}.
+ * The Azure resource ID of the managed identity. Use the format
+ * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}`.
* This is only available for user-assgined identities. For system-assigned identities, the
* access_connector_id is used to identify the identity. If this field is not provided, then we
- * assume the AzureManagedIdentity is for a system-assigned identity.
+ * assume the AzureManagedIdentity is using the system-assigned identity.
*/
@JsonProperty("managed_identity_id")
private String managedIdentityId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java
index 5f2408e0c..3d814c795 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Cancel refresh */
@Generated
public class CancelRefreshRequest {
/** ID of the refresh. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java
index 992b920ab..90db84a7f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java
@@ -13,5 +13,4 @@ public enum CatalogType {
MANAGED_CATALOG,
MANAGED_ONLINE_CATALOG,
SYSTEM_CATALOG,
- UNKNOWN_CATALOG_TYPE,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
index 744c57db6..770f4fdfb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
@@ -37,9 +37,7 @@ public CatalogInfo create(String name) {
}
/**
- * Create a catalog.
- *
- * Creates a new catalog instance in the parent metastore if the caller is a metastore admin or
+ * Creates a new catalog instance in the parent metastore if the caller is a metastore admin or
* has the **CREATE_CATALOG** privilege.
*/
public CatalogInfo create(CreateCatalog request) {
@@ -51,10 +49,8 @@ public void delete(String name) {
}
/**
- * Delete a catalog.
- *
- * Deletes the catalog that matches the supplied name. The caller must be a metastore admin or
- * the owner of the catalog.
+ * Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the
+ * owner of the catalog.
*/
public void delete(DeleteCatalogRequest request) {
impl.delete(request);
@@ -65,22 +61,18 @@ public CatalogInfo get(String name) {
}
/**
- * Get a catalog.
- *
- * Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner
- * of the catalog, or a user that has the **USE_CATALOG** privilege set for their account.
+ * Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of
+ * the catalog, or a user that has the **USE_CATALOG** privilege set for their account.
*/
public CatalogInfo get(GetCatalogRequest request) {
return impl.get(request);
}
/**
- * List catalogs.
- *
- * Gets an array of catalogs in the metastore. If the caller is the metastore admin, all
- * catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the
- * caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a
- * specific ordering of the elements in the array.
+ * Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs
+ * will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has
+ * the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering
+ * of the elements in the array.
*/
public Iterable<CatalogInfo> list(ListCatalogsRequest request) {
/**
- * Update a catalog.
- *
- * Updates the catalog that matches the supplied name. The caller must be either the owner of
- * the catalog, or a metastore admin (when changing the owner field of the catalog).
+ * Updates the catalog that matches the supplied name. The caller must be either the owner of the
+ * catalog, or a metastore admin (when changing the owner field of the catalog).
*/
public CatalogInfo update(UpdateCatalog request) {
return impl.update(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
index 86f3a6b7d..870e321f2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
@@ -19,44 +19,34 @@
@Generated
public interface CatalogsService {
/**
- * Create a catalog.
- *
- * Creates a new catalog instance in the parent metastore if the caller is a metastore admin or
+ * Creates a new catalog instance in the parent metastore if the caller is a metastore admin or
* has the **CREATE_CATALOG** privilege.
*/
CatalogInfo create(CreateCatalog createCatalog);
/**
- * Delete a catalog.
- *
- * Deletes the catalog that matches the supplied name. The caller must be a metastore admin or
- * the owner of the catalog.
+ * Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the
+ * owner of the catalog.
*/
void delete(DeleteCatalogRequest deleteCatalogRequest);
/**
- * Get a catalog.
- *
- * Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner
- * of the catalog, or a user that has the **USE_CATALOG** privilege set for their account.
+ * Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of
+ * the catalog, or a user that has the **USE_CATALOG** privilege set for their account.
*/
CatalogInfo get(GetCatalogRequest getCatalogRequest);
/**
- * List catalogs.
- *
- * Gets an array of catalogs in the metastore. If the caller is the metastore admin, all
- * catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the
- * caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a
- * specific ordering of the elements in the array.
+ * Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs
+ * will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has
+ * the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering
+ * of the elements in the array.
*/
ListCatalogsResponse list(ListCatalogsRequest listCatalogsRequest);
/**
- * Update a catalog.
- *
- * Updates the catalog that matches the supplied name. The caller must be either the owner of
- * the catalog, or a metastore admin (when changing the owner field of the catalog).
+ * Updates the catalog that matches the supplied name. The caller must be either the owner of the
+ * catalog, or a metastore admin (when changing the owner field of the catalog).
*/
CatalogInfo update(UpdateCatalog updateCatalog);
}
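As context for the list semantics described above (results scoped to the caller, no ordering guarantee), a short sketch against the workspace-level client; the `catalogs()` accessor and default environment-based authentication are assumed:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CatalogInfo;
import com.databricks.sdk.service.catalog.ListCatalogsRequest;

public class ListCatalogsSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // assumes credentials from the environment
    // Only catalogs the caller owns or holds USE_CATALOG on are returned; order is not guaranteed.
    for (CatalogInfo catalog : w.catalogs().list(new ListCatalogsRequest())) {
      System.out.println(catalog.getName());
    }
  }
}
```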
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java
index bcf1fe2c8..5be89d7ad 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java
@@ -7,17 +7,21 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/**
+ * The Cloudflare API token configuration. Read more at
+ * https://developers.cloudflare.com/r2/api/s3/tokens/
+ */
@Generated
public class CloudflareApiToken {
- /** The Cloudflare access key id of the token. */
+ /** The access key ID associated with the API token. */
@JsonProperty("access_key_id")
private String accessKeyId;
- /** The account id associated with the API token. */
+ /** The ID of the account associated with the API token. */
@JsonProperty("account_id")
private String accountId;
- /** The secret access token generated for the access key id */
+ /** The secret access token generated for the above access key ID. */
@JsonProperty("secret_access_key")
private String secretAccessKey;
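A hedged sketch of populating the token object described above; the setter and getter names are assumed from the field names, and all values are placeholders (a real token is created in the Cloudflare R2 dashboard, see the URL above):

```java
import com.databricks.sdk.service.catalog.CloudflareApiToken;

public class CloudflareApiTokenSketch {
  public static void main(String[] args) {
    CloudflareApiToken token =
        new CloudflareApiToken()
            .setAccountId("placeholder-account-id")
            .setAccessKeyId("placeholder-access-key-id")
            .setSecretAccessKey("placeholder-secret-access-key");
    System.out.println(token.getAccountId());
  }
}
```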
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionDependency.java
new file mode 100755
index 000000000..39a045978
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionDependency.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** A connection that is dependent on a SQL object. */
+@Generated
+public class ConnectionDependency {
+ /** Full name of the dependent connection, in the form of __connection_name__. */
+ @JsonProperty("connection_name")
+ private String connectionName;
+
+ public ConnectionDependency setConnectionName(String connectionName) {
+ this.connectionName = connectionName;
+ return this;
+ }
+
+ public String getConnectionName() {
+ return connectionName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ConnectionDependency that = (ConnectionDependency) o;
+ return Objects.equals(connectionName, that.connectionName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(connectionName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ConnectionDependency.class)
+ .add("connectionName", connectionName)
+ .toString();
+ }
+}
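The new class is a plain value object; a minimal sketch using only the members shown above (where such a dependency is attached, e.g. inside a dependency list on a SQL object, is an assumption and not shown here):

```java
import com.databricks.sdk.service.catalog.ConnectionDependency;

public class ConnectionDependencySketch {
  public static void main(String[] args) {
    // "my_connection" is a placeholder connection name.
    ConnectionDependency dep = new ConnectionDependency().setConnectionName("my_connection");
    System.out.println(dep.getConnectionName()); // prints: my_connection
  }
}
```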
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
index b076b2ff4..0c5d5437b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Next Id: 31 */
+/** Next Id: 35 */
@Generated
public enum ConnectionType {
BIGQUERY,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
index ff8b2cda9..a3cc56631 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
@@ -42,9 +42,7 @@ public ConnectionInfo create(
}
/**
- * Create a connection.
- *
- * Creates a new connection
+ * Creates a new connection
*
* Creates a new connection to an external data source. It allows users to specify connection
* details and configurations for interaction with the external server.
@@ -57,11 +55,7 @@ public void delete(String name) {
delete(new DeleteConnectionRequest().setName(name));
}
- /**
- * Delete a connection.
- *
- * Deletes the connection that matches the supplied name.
- */
+ /** Deletes the connection that matches the supplied name. */
public void delete(DeleteConnectionRequest request) {
impl.delete(request);
}
@@ -70,20 +64,12 @@ public ConnectionInfo get(String name) {
return get(new GetConnectionRequest().setName(name));
}
- /**
- * Get a connection.
- *
- * Gets a connection from it's name.
- */
+ /** Gets a connection from its name. */
public ConnectionInfo get(GetConnectionRequest request) {
return impl.get(request);
}
- /**
- * List connections.
- *
- * List all connections.
- */
+ /** List all connections. */
public Iterable<ConnectionInfo> list(ListConnectionsRequest request) {
- /**
- * Update a connection.
- *
- * Updates the connection that matches the supplied name.
- */
+ /** Updates the connection that matches the supplied name. */
public ConnectionInfo update(UpdateConnection request) {
return impl.update(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java
index a1e882c38..45bea7e36 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java
@@ -21,40 +21,22 @@
@Generated
public interface ConnectionsService {
/**
- * Create a connection.
- *
- * Creates a new connection
+ * Creates a new connection
*
* Creates a new connection to an external data source. It allows users to specify connection
* details and configurations for interaction with the external server.
*/
ConnectionInfo create(CreateConnection createConnection);
- /**
- * Delete a connection.
- *
- * Deletes the connection that matches the supplied name.
- */
+ /** Deletes the connection that matches the supplied name. */
void delete(DeleteConnectionRequest deleteConnectionRequest);
- /**
- * Get a connection.
- *
- * Gets a connection from it's name.
- */
+ /** Gets a connection from its name. */
ConnectionInfo get(GetConnectionRequest getConnectionRequest);
- /**
- * List connections.
- *
- * List all connections.
- */
+ /** List all connections. */
ListConnectionsResponse list(ListConnectionsRequest listConnectionsRequest);
- /**
- * Update a connection.
- *
- * Updates the connection that matches the supplied name.
- */
+ /** Updates the connection that matches the supplied name. */
ConnectionInfo update(UpdateConnection updateConnection);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java
index 023a3e68e..10ea0adeb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java
@@ -9,7 +9,7 @@
@Generated
public class CreateCredentialRequest {
- /** The AWS IAM role configuration */
+ /** The AWS IAM role configuration. */
@JsonProperty("aws_iam_role")
private AwsIamRole awsIamRole;
@@ -17,7 +17,7 @@ public class CreateCredentialRequest {
@JsonProperty("azure_managed_identity")
private AzureManagedIdentity azureManagedIdentity;
- /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */
+ /** The Azure service principal configuration. */
@JsonProperty("azure_service_principal")
private AzureServicePrincipal azureServicePrincipal;
@@ -25,7 +25,7 @@ public class CreateCredentialRequest {
@JsonProperty("comment")
private String comment;
- /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */
+ /** The Databricks managed GCP service account configuration. */
@JsonProperty("databricks_gcp_service_account")
private DatabricksGcpServiceAccount databricksGcpServiceAccount;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java
index 7f3a0730c..c58e4bd9e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create an Online Table */
@Generated
public class CreateOnlineTableRequest {
/** Online Table information. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java
index b462a9075..ffadcbe11 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java
@@ -33,11 +33,17 @@ public class CreateStorageCredential {
@JsonProperty("databricks_gcp_service_account")
private DatabricksGcpServiceAccountRequest databricksGcpServiceAccount;
- /** The credential name. The name must be unique within the metastore. */
+ /**
+ * The credential name. The name must be unique among storage and service credentials within the
+ * metastore.
+ */
@JsonProperty("name")
private String name;
- /** Whether the storage credential is only usable for read operations. */
+ /**
+ * Whether the credential is usable only for read operations. Only applicable when purpose is
+ * **STORAGE**.
+ */
@JsonProperty("read_only")
private Boolean readOnly;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialDependency.java
new file mode 100755
index 000000000..dffd56f22
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialDependency.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** A credential that is dependent on a SQL object. */
+@Generated
+public class CredentialDependency {
+ /** Full name of the dependent credential, in the form of __credential_name__. */
+ @JsonProperty("credential_name")
+ private String credentialName;
+
+ public CredentialDependency setCredentialName(String credentialName) {
+ this.credentialName = credentialName;
+ return this;
+ }
+
+ public String getCredentialName() {
+ return credentialName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CredentialDependency that = (CredentialDependency) o;
+ return Objects.equals(credentialName, that.credentialName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(credentialName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CredentialDependency.class)
+ .add("credentialName", credentialName)
+ .toString();
+ }
+}
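Same shape as ConnectionDependency above; a small sketch showing that the generated equals/hashCode compare by field value (the credential name is a placeholder):

```java
import com.databricks.sdk.service.catalog.CredentialDependency;

public class CredentialDependencySketch {
  public static void main(String[] args) {
    CredentialDependency a = new CredentialDependency().setCredentialName("my_credential");
    CredentialDependency b = new CredentialDependency().setCredentialName("my_credential");
    System.out.println(a.equals(b)); // true: equality is by credentialName
  }
}
```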
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java
index 629f271e9..002c7de8e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java
@@ -9,7 +9,7 @@
@Generated
public class CredentialInfo {
- /** The AWS IAM role configuration */
+ /** The AWS IAM role configuration. */
@JsonProperty("aws_iam_role")
private AwsIamRole awsIamRole;
@@ -17,7 +17,7 @@ public class CredentialInfo {
@JsonProperty("azure_managed_identity")
private AzureManagedIdentity azureManagedIdentity;
- /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */
+ /** The Azure service principal configuration. */
@JsonProperty("azure_service_principal")
private AzureServicePrincipal azureServicePrincipal;
@@ -33,7 +33,7 @@ public class CredentialInfo {
@JsonProperty("created_by")
private String createdBy;
- /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */
+ /** The Databricks managed GCP service account configuration. */
@JsonProperty("databricks_gcp_service_account")
private DatabricksGcpServiceAccount databricksGcpServiceAccount;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java
index 0cfc214ba..499b626af 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java
@@ -37,10 +37,8 @@ public CredentialInfo createCredential(String name) {
}
/**
- * Create a credential.
- *
- * Creates a new credential. The type of credential to be created is determined by the
- * **purpose** field, which should be either **SERVICE** or **STORAGE**.
+ * Creates a new credential. The type of credential to be created is determined by the **purpose**
+ * field, which should be either **SERVICE** or **STORAGE**.
*
* The caller must be a metastore admin or have the metastore privilege
* **CREATE_STORAGE_CREDENTIAL** for storage credentials, or **CREATE_SERVICE_CREDENTIAL** for
@@ -55,10 +53,8 @@ public void deleteCredential(String nameArg) {
}
/**
- * Delete a credential.
- *
- * Deletes a service or storage credential from the metastore. The caller must be an owner of
- * the credential.
+ * Deletes a service or storage credential from the metastore. The caller must be an owner of the
+ * credential.
*/
public void deleteCredential(DeleteCredentialRequest request) {
impl.deleteCredential(request);
@@ -70,9 +66,7 @@ public TemporaryCredentials generateTemporaryServiceCredential(String credential
}
/**
- * Generate a temporary service credential.
- *
- * Returns a set of temporary credentials generated using the specified service credential. The
+ * Returns a set of temporary credentials generated using the specified service credential. The
* caller must be a metastore admin or have the metastore privilege **ACCESS** on the service
* credential.
*/
@@ -86,19 +80,15 @@ public CredentialInfo getCredential(String nameArg) {
}
/**
- * Get a credential.
- *
- * Gets a service or storage credential from the metastore. The caller must be a metastore
- * admin, the owner of the credential, or have any permission on the credential.
+ * Gets a service or storage credential from the metastore. The caller must be a metastore admin,
+ * the owner of the credential, or have any permission on the credential.
*/
public CredentialInfo getCredential(GetCredentialRequest request) {
return impl.getCredential(request);
}
/**
- * List credentials.
- *
- * Gets an array of credentials (as __CredentialInfo__ objects).
+ * Gets an array of credentials (as __CredentialInfo__ objects).
*
* The array is limited to only the credentials that the caller has permission to access. If
* the caller is a metastore admin, retrieval of credentials is unrestricted. There is no
@@ -123,9 +113,7 @@ public CredentialInfo updateCredential(String nameArg) {
}
/**
- * Update a credential.
- *
- * Updates a service or storage credential on the metastore.
+ * Updates a service or storage credential on the metastore.
*
* The caller must be the owner of the credential or a metastore admin or have the `MANAGE`
* permission. If the caller is a metastore admin, only the __owner__ field can be changed.
@@ -135,9 +123,7 @@ public CredentialInfo updateCredential(UpdateCredentialRequest request) {
}
/**
- * Validate a credential.
- *
- * Validates a credential.
+ * Validates a credential.
*
* For service credentials (purpose is **SERVICE**), either the __credential_name__ or the
* cloud-specific credential must be provided.
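A hedged sketch of the create flow described above, where the **purpose** field selects **SERVICE** vs. **STORAGE**; the `credentials()` accessor, the `CredentialPurpose.SERVICE` constant, the `AwsIamRole.setRoleArn` setter, and all names and the ARN are assumptions for illustration, not taken from this diff:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.AwsIamRole;
import com.databricks.sdk.service.catalog.CreateCredentialRequest;
import com.databricks.sdk.service.catalog.CredentialInfo;
import com.databricks.sdk.service.catalog.CredentialPurpose;

public class CreateServiceCredentialSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // assumes credentials from the environment
    // Sketch only: creates a SERVICE credential backed by an AWS IAM role.
    CredentialInfo info =
        w.credentials()
            .createCredential(
                new CreateCredentialRequest()
                    .setName("my_service_credential")
                    .setPurpose(CredentialPurpose.SERVICE)
                    .setAwsIamRole(
                        new AwsIamRole().setRoleArn("arn:aws:iam::123456789012:role/my-role")));
    System.out.println(info.getName());
  }
}
```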
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java
index 906445cf1..cbde97484 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java
@@ -19,10 +19,8 @@
@Generated
public interface CredentialsService {
/**
- * Create a credential.
- *
- * Creates a new credential. The type of credential to be created is determined by the
- * **purpose** field, which should be either **SERVICE** or **STORAGE**.
+ * Creates a new credential. The type of credential to be created is determined by the **purpose**
+ * field, which should be either **SERVICE** or **STORAGE**.
*
* The caller must be a metastore admin or have the metastore privilege
* **CREATE_STORAGE_CREDENTIAL** for storage credentials, or **CREATE_SERVICE_CREDENTIAL** for
@@ -31,17 +29,13 @@ public interface CredentialsService {
CredentialInfo createCredential(CreateCredentialRequest createCredentialRequest);
/**
- * Delete a credential.
- *
- * Deletes a service or storage credential from the metastore. The caller must be an owner of
- * the credential.
+ * Deletes a service or storage credential from the metastore. The caller must be an owner of the
+ * credential.
*/
void deleteCredential(DeleteCredentialRequest deleteCredentialRequest);
/**
- * Generate a temporary service credential.
- *
- * Returns a set of temporary credentials generated using the specified service credential. The
+ * Returns a set of temporary credentials generated using the specified service credential. The
* caller must be a metastore admin or have the metastore privilege **ACCESS** on the service
* credential.
*/
@@ -49,17 +43,13 @@ TemporaryCredentials generateTemporaryServiceCredential(
GenerateTemporaryServiceCredentialRequest generateTemporaryServiceCredentialRequest);
/**
- * Get a credential.
- *
- * Gets a service or storage credential from the metastore. The caller must be a metastore
- * admin, the owner of the credential, or have any permission on the credential.
+ * Gets a service or storage credential from the metastore. The caller must be a metastore admin,
+ * the owner of the credential, or have any permission on the credential.
*/
CredentialInfo getCredential(GetCredentialRequest getCredentialRequest);
/**
- * List credentials.
- *
- * Gets an array of credentials (as __CredentialInfo__ objects).
+ * Gets an array of credentials (as __CredentialInfo__ objects).
*
* The array is limited to only the credentials that the caller has permission to access. If
* the caller is a metastore admin, retrieval of credentials is unrestricted. There is no
@@ -68,9 +58,7 @@ TemporaryCredentials generateTemporaryServiceCredential(
ListCredentialsResponse listCredentials(ListCredentialsRequest listCredentialsRequest);
/**
- * Update a credential.
- *
- * Updates a service or storage credential on the metastore.
+ * Updates a service or storage credential on the metastore.
*
* The caller must be the owner of the credential or a metastore admin or have the `MANAGE`
* permission. If the caller is a metastore admin, only the __owner__ field can be changed.
@@ -78,9 +66,7 @@ TemporaryCredentials generateTemporaryServiceCredential(
CredentialInfo updateCredential(UpdateCredentialRequest updateCredentialRequest);
/**
- * Validate a credential.
- *
- * Validates a credential.
+ * Validates a credential.
*
* For service credentials (purpose is **SERVICE**), either the __credential_name__ or the
* cloud-specific credential must be provided.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java
index 1bafcc2c9..12e180f73 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java
@@ -11,21 +11,28 @@ public enum DataSourceFormat {
BIGQUERY_FORMAT,
CSV,
DATABRICKS_FORMAT,
+ DATABRICKS_ROW_STORE_FORMAT,
DELTA,
DELTASHARING,
- HIVE_CUSTOM,
- HIVE_SERDE,
+ DELTA_UNIFORM_HUDI,
+ DELTA_UNIFORM_ICEBERG,
+ HIVE,
+ ICEBERG,
JSON,
+ MONGODB_FORMAT,
MYSQL_FORMAT,
NETSUITE_FORMAT,
+ ORACLE_FORMAT,
ORC,
PARQUET,
POSTGRESQL_FORMAT,
REDSHIFT_FORMAT,
+ SALESFORCE_DATA_CLOUD_FORMAT,
SALESFORCE_FORMAT,
SNOWFLAKE_FORMAT,
SQLDW_FORMAT,
SQLSERVER_FORMAT,
+ TERADATA_FORMAT,
TEXT,
UNITY_CATALOG,
VECTOR_INDEX_FORMAT,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java
index 390833376..d80d50329 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java
@@ -10,11 +10,7 @@
/** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */
@Generated
public class DatabricksGcpServiceAccount {
- /**
- * The Databricks internal ID that represents this managed identity. This field is only used to
- * persist the credential_id once it is fetched from the credentials manager - as we only use the
- * protobuf serializer to store credentials, this ID gets persisted to the database
- */
+ /** The Databricks internal ID that represents this managed identity. */
@JsonProperty("credential_id")
private String credentialId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java
index 82c772aaf..89a55e796 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java
@@ -6,6 +6,7 @@
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
+/** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */
@Generated
public class DatabricksGcpServiceAccountRequest {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java
index 5ebfeb231..b9ab4ce59 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java
@@ -7,15 +7,14 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */
@Generated
public class DatabricksGcpServiceAccountResponse {
- /**
- * The Databricks internal ID that represents this service account. This is an output-only field.
- */
+ /** The Databricks internal ID that represents this managed identity. */
@JsonProperty("credential_id")
private String credentialId;
- /** The email of the service account. This is an output-only field. */
+ /** The email of the service account. */
@JsonProperty("email")
private String email;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java
index d0acb5c9c..cbe978421 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a metastore assignment */
@Generated
public class DeleteAccountMetastoreAssignmentRequest {
/** Unity Catalog metastore ID */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java
index 92f93166c..77de1e923 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a metastore */
@Generated
public class DeleteAccountMetastoreRequest {
/** Force deletion even if the metastore is not empty. Default is false. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java
index d0280e9fd..cbb248929 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a storage credential */
@Generated
public class DeleteAccountStorageCredentialRequest {
/** Force deletion even if the Storage Credential is not empty. Default is false. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java
index 393e8801f..18198fde3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a Registered Model Alias */
@Generated
public class DeleteAliasRequest {
/** The name of the alias */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java
index 160821d70..1179c057c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a catalog */
@Generated
public class DeleteCatalogRequest {
/** Force deletion even if the catalog is not empty. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java
index b294cd3a3..785695780 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a connection */
@Generated
public class DeleteConnectionRequest {
/** The name of the connection to be deleted. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java
index 2a771f6bb..67d0cc835 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a credential */
@Generated
public class DeleteCredentialRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java
index ffdf3ba52..7ac754ac2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete an external location */
@Generated
public class DeleteExternalLocationRequest {
/** Force deletion even if there are dependent external tables or mounts. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java
index 1818ede51..3609ad11f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a function */
@Generated
public class DeleteFunctionRequest {
/** Force deletion even if the function is not empty. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java
index 3bc2a7251..53c204535 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a metastore */
@Generated
public class DeleteMetastoreRequest {
/** Force deletion even if the metastore is not empty. Default is false. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java
index 45da03a28..4ac19d9d1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a Model Version */
@Generated
public class DeleteModelVersionRequest {
/** The three-level (fully qualified) name of the model version */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java
index 004f187b0..fedc583be 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete an Online Table */
@Generated
public class DeleteOnlineTableRequest {
/** Full three-part (catalog, schema, table) name of the table. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java
index 015df9599..04efaef61 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a table monitor */
@Generated
public class DeleteQualityMonitorRequest {
/** Full name of the table. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java
index b8117cce2..b85bd7c04 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a Registered Model */
@Generated
public class DeleteRegisteredModelRequest {
/** The three-level (fully qualified) name of the registered model */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java
index c774720e9..067b55898 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a schema */
@Generated
public class DeleteSchemaRequest {
/** Force deletion even if the schema is not empty. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java
index c9837e254..8c23769e9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java
@@ -8,10 +8,12 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a credential */
@Generated
public class DeleteStorageCredentialRequest {
- /** Force deletion even if there are dependent external locations or external tables. */
+ /**
+ * Force deletion even if there are dependent external locations or external tables (when purpose
+ * is **STORAGE**) or dependent services (when purpose is **SERVICE**).
+ */
@JsonIgnore
@QueryParam("force")
private Boolean force;
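As an aside, a minimal usage sketch of the reworded `force` flag (not part of this change; the credential name is a placeholder):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.DeleteStorageCredentialRequest;

public class DeleteStorageCredentialExample {
  public static void main(String[] args) {
    // Authenticates from the environment / .databrickscfg by default.
    WorkspaceClient w = new WorkspaceClient();

    // force=true drops the credential even if dependent external locations or tables exist.
    w.storageCredentials()
        .delete(
            new DeleteStorageCredentialRequest().setName("my_storage_credential").setForce(true));
  }
}
```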
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java
index db8a2c601..ee695f770 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a table constraint */
@Generated
public class DeleteTableConstraintRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java
index b4d371447..c9df2f67f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a table */
@Generated
public class DeleteTableRequest {
/** Full name of the table. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java
index 2fd732e7a..a9a63cab8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a Volume */
@Generated
public class DeleteVolumeRequest {
/** The three-level (fully qualified) name of the volume */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java
index 71d02c997..5ace4876d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java
@@ -8,11 +8,19 @@
import java.util.Objects;
/**
- * A dependency of a SQL object. Either the __table__ field or the __function__ field must be
- * defined.
+ * A dependency of a SQL object. One of the following fields must be defined: __table__,
+ * __function__, __connection__, or __credential__.
*/
@Generated
public class Dependency {
+ /** A connection that is dependent on a SQL object. */
+ @JsonProperty("connection")
+ private ConnectionDependency connection;
+
+ /** A credential that is dependent on a SQL object. */
+ @JsonProperty("credential")
+ private CredentialDependency credential;
+
/** A function that is dependent on a SQL object. */
@JsonProperty("function")
private FunctionDependency function;
@@ -21,6 +29,24 @@ public class Dependency {
@JsonProperty("table")
private TableDependency table;
+ public Dependency setConnection(ConnectionDependency connection) {
+ this.connection = connection;
+ return this;
+ }
+
+ public ConnectionDependency getConnection() {
+ return connection;
+ }
+
+ public Dependency setCredential(CredentialDependency credential) {
+ this.credential = credential;
+ return this;
+ }
+
+ public CredentialDependency getCredential() {
+ return credential;
+ }
+
public Dependency setFunction(FunctionDependency function) {
this.function = function;
return this;
@@ -44,17 +70,22 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Dependency that = (Dependency) o;
- return Objects.equals(function, that.function) && Objects.equals(table, that.table);
+ return Objects.equals(connection, that.connection)
+ && Objects.equals(credential, that.credential)
+ && Objects.equals(function, that.function)
+ && Objects.equals(table, that.table);
}
@Override
public int hashCode() {
- return Objects.hash(function, table);
+ return Objects.hash(connection, credential, function, table);
}
@Override
public String toString() {
return new ToStringer(Dependency.class)
+ .add("connection", connection)
+ .add("credential", credential)
.add("function", function)
.add("table", table)
.toString();
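To illustrate the widened union, a small sketch (assumed usage, not taken from this diff) that builds `Dependency` values with the new setters; the fields of `ConnectionDependency` and `CredentialDependency` are not shown in this change, so they are left unset here:

```java
import com.databricks.sdk.service.catalog.ConnectionDependency;
import com.databricks.sdk.service.catalog.CredentialDependency;
import com.databricks.sdk.service.catalog.Dependency;

public class DependencyExample {
  public static void main(String[] args) {
    // Exactly one of table, function, connection, or credential should be set per Dependency.
    Dependency onConnection = new Dependency().setConnection(new ConnectionDependency());
    Dependency onCredential = new Dependency().setCredential(new CredentialDependency());

    // toString/equals/hashCode now include the two new fields, per the hunks above.
    System.out.println(onConnection);
    System.out.println(onCredential);
  }
}
```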
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java
index 1e78370ba..5ff7d0818 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Disable a system schema */
@Generated
public class DisableRequest {
/** The metastore ID under which the system schema lives. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java
index 13b2d0b9f..8b5c60f03 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get boolean reflecting if table exists */
@Generated
public class ExistsRequest {
/** Full name of the table. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
index a5c5d698e..76a6f4582 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
@@ -41,10 +41,8 @@ public ExternalLocationInfo create(String name, String url, String credentialName) {
}
/**
- * Create an external location.
- *
- * Creates a new external location entry in the metastore. The caller must be a metastore admin
- * or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated
+ * Creates a new external location entry in the metastore. The caller must be a metastore admin or
+ * have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated
* storage credential.
*/
public ExternalLocationInfo create(CreateExternalLocation request) {
@@ -56,10 +54,8 @@ public void delete(String name) {
}
/**
- * Delete an external location.
- *
- * Deletes the specified external location from the metastore. The caller must be the owner of
- * the external location.
+ * Deletes the specified external location from the metastore. The caller must be the owner of the
+ * external location.
*/
public void delete(DeleteExternalLocationRequest request) {
impl.delete(request);
@@ -70,21 +66,17 @@ public ExternalLocationInfo get(String name) {
}
/**
- * Get an external location.
- *
- * Gets an external location from the metastore. The caller must be either a metastore admin,
- * the owner of the external location, or a user that has some privilege on the external location.
+ * Gets an external location from the metastore. The caller must be either a metastore admin, the
+ * owner of the external location, or a user that has some privilege on the external location.
*/
public ExternalLocationInfo get(GetExternalLocationRequest request) {
return impl.get(request);
}
/**
- * List external locations.
- *
- * Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore.
- * The caller must be a metastore admin, the owner of the external location, or a user that has
- * some privilege on the external location. There is no guarantee of a specific ordering of the
+ * Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The
+ * caller must be a metastore admin, the owner of the external location, or a user that has some
+ * privilege on the external location. There is no guarantee of a specific ordering of the
* elements in the array.
*/
public Iterable<ExternalLocationInfo> list(ListExternalLocationsRequest request) {
/**
- * Update an external location.
- *
- * Updates an external location in the metastore. The caller must be the owner of the external
+ * Updates an external location in the metastore. The caller must be the owner of the external
* location, or be a metastore admin. In the second case, the admin can only update the name of
* the external location.
*/
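A hedged usage sketch of the calls whose Javadoc is condensed above (names and the storage URL are placeholders, not part of this change):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ExternalLocationInfo;

public class ExternalLocationsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Requires metastore admin or CREATE_EXTERNAL_LOCATION on the metastore and the credential.
    ExternalLocationInfo created =
        w.externalLocations().create("my_location", "s3://my-bucket/landing", "my_credential");

    // Any caller with some privilege on the location (or its owner, or an admin) can read it.
    ExternalLocationInfo fetched = w.externalLocations().get(created.getName());
    System.out.println(fetched.getUrl());

    // Only the owner may delete the location.
    w.externalLocations().delete(created.getName());
  }
}
```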
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
index ec6a4d48d..f42879409 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
@@ -22,44 +22,34 @@
@Generated
public interface ExternalLocationsService {
/**
- * Create an external location.
- *
- * Creates a new external location entry in the metastore. The caller must be a metastore admin
- * or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated
+ * Creates a new external location entry in the metastore. The caller must be a metastore admin or
+ * have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated
* storage credential.
*/
ExternalLocationInfo create(CreateExternalLocation createExternalLocation);
/**
- * Delete an external location.
- *
- * Deletes the specified external location from the metastore. The caller must be the owner of
- * the external location.
+ * Deletes the specified external location from the metastore. The caller must be the owner of the
+ * external location.
*/
void delete(DeleteExternalLocationRequest deleteExternalLocationRequest);
/**
- * Get an external location.
- *
- * Gets an external location from the metastore. The caller must be either a metastore admin,
- * the owner of the external location, or a user that has some privilege on the external location.
+ * Gets an external location from the metastore. The caller must be either a metastore admin, the
+ * owner of the external location, or a user that has some privilege on the external location.
*/
ExternalLocationInfo get(GetExternalLocationRequest getExternalLocationRequest);
/**
- * List external locations.
- *
- * Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore.
- * The caller must be a metastore admin, the owner of the external location, or a user that has
- * some privilege on the external location. There is no guarantee of a specific ordering of the
+ * Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The
+ * caller must be a metastore admin, the owner of the external location, or a user that has some
+ * privilege on the external location. There is no guarantee of a specific ordering of the
* elements in the array.
*/
ListExternalLocationsResponse list(ListExternalLocationsRequest listExternalLocationsRequest);
/**
- * Update an external location.
- *
- * Updates an external location in the metastore. The caller must be the owner of the external
+ * Updates an external location in the metastore. The caller must be the owner of the external
* location, or be a metastore admin. In the second case, the admin can only update the name of
* the external location.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java
index cfccffb7d..e17a54629 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java
@@ -26,6 +26,10 @@ public class ForeignKeyConstraint {
@JsonProperty("parent_table")
private String parentTable;
+ /** True if the constraint is RELY, false or unset if NORELY. */
+ @JsonProperty("rely")
+ private Boolean rely;
+
public ForeignKeyConstraint setChildColumns(Collection<String> childColumns) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
/**
- * Create a function.
- *
- * **WARNING: This API is experimental and will change in future versions**
+ * **WARNING: This API is experimental and will change in future versions**
*
* Creates a new function
*
@@ -55,13 +53,11 @@ public void delete(String name) {
}
/**
- * Delete a function.
- *
- * Deletes the function that matches the supplied name. For the deletion to succeed, the user
- * must satisfy one of the following conditions: - Is the owner of the function's parent catalog -
- * Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its
- * parent catalog - Is the owner of the function itself and have both the **USE_CATALOG**
- * privilege on its parent catalog and the **USE_SCHEMA** privilege on its parent schema
+ * Deletes the function that matches the supplied name. For the deletion to succeed, the user must
+ * satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is
+ * the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent
+ * catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on
+ * its parent catalog and the **USE_SCHEMA** privilege on its parent schema
*/
public void delete(DeleteFunctionRequest request) {
impl.delete(request);
@@ -72,9 +68,7 @@ public FunctionInfo get(String name) {
}
/**
- * Get a function.
- *
- * Gets a function from within a parent catalog and schema. For the fetch to succeed, the user
+ * Gets a function from within a parent catalog and schema. For the fetch to succeed, the user
* must satisfy one of the following requirements: - Is a metastore admin - Is an owner of the
* function's parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog
* and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent
@@ -90,9 +84,7 @@ public Iterable<FunctionInfo> list(ListFunctionsRequest request) {
/**
- * List functions.
- *
- * List functions within the specified parent catalog and schema. If the user is a metastore
+ * List functions within the specified parent catalog and schema. If the user is a metastore
* admin, all functions are returned in the output list. Otherwise, the user must have the
* **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and
* the output list contains only functions for which either the user has the **EXECUTE** privilege
@@ -118,9 +110,7 @@ public FunctionInfo update(String name) {
}
/**
- * Update a function.
- *
- * Updates the function that matches the supplied name. Only the owner of the function can be
+ * Updates the function that matches the supplied name. Only the owner of the function can be
* updated. If the user is not a metastore admin, the user must be a member of the group that is
* the new function owner. - Is a metastore admin - Is the owner of the function's parent catalog
* - Is the owner of the function's parent schema and has the **USE_CATALOG** privilege on its
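For reference, a small sketch of the `FunctionsAPI` calls documented above (the three-level function name is a placeholder; this is assumed usage, not part of the change):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.FunctionInfo;

public class FunctionsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Fetch requires metastore admin, catalog/function ownership, or the USE_* privileges above.
    FunctionInfo fn = w.functions().get("main.default.my_function");
    System.out.println(fn.getFullName());

    // Deletion succeeds only when one of the ownership/privilege conditions above is met.
    w.functions().delete("main.default.my_function");
  }
}
```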
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
index 813657540..a88771087 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
@@ -18,9 +18,7 @@
@Generated
public interface FunctionsService {
/**
- * Create a function.
- *
- * **WARNING: This API is experimental and will change in future versions**
+ * **WARNING: This API is experimental and will change in future versions**
*
* Creates a new function
*
@@ -31,20 +29,16 @@ public interface FunctionsService {
FunctionInfo create(CreateFunctionRequest createFunctionRequest);
/**
- * Delete a function.
- *
- * Deletes the function that matches the supplied name. For the deletion to succeed, the user
- * must satisfy one of the following conditions: - Is the owner of the function's parent catalog -
- * Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its
- * parent catalog - Is the owner of the function itself and have both the **USE_CATALOG**
- * privilege on its parent catalog and the **USE_SCHEMA** privilege on its parent schema
+ * Deletes the function that matches the supplied name. For the deletion to succeed, the user must
+ * satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is
+ * the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent
+ * catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on
+ * its parent catalog and the **USE_SCHEMA** privilege on its parent schema
*/
void delete(DeleteFunctionRequest deleteFunctionRequest);
/**
- * Get a function.
- *
- * Gets a function from within a parent catalog and schema. For the fetch to succeed, the user
+ * Gets a function from within a parent catalog and schema. For the fetch to succeed, the user
* must satisfy one of the following requirements: - Is a metastore admin - Is an owner of the
* function's parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog
* and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent
@@ -54,9 +48,7 @@ public interface FunctionsService {
FunctionInfo get(GetFunctionRequest getFunctionRequest);
/**
- * List functions.
- *
- * List functions within the specified parent catalog and schema. If the user is a metastore
+ * List functions within the specified parent catalog and schema. If the user is a metastore
* admin, all functions are returned in the output list. Otherwise, the user must have the
* **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and
* the output list contains only functions for which either the user has the **EXECUTE** privilege
@@ -66,9 +58,7 @@ public interface FunctionsService {
ListFunctionsResponse list(ListFunctionsRequest listFunctionsRequest);
/**
- * Update a function.
- *
- * Updates the function that matches the supplied name. Only the owner of the function can be
+ * Updates the function that matches the supplied name. Only the owner of the function can be
* updated. If the user is not a metastore admin, the user must be a member of the group that is
* the new function owner. - Is a metastore admin - Is the owner of the function's parent catalog
* - Is the owner of the function's parent schema and has the **USE_CATALOG** privilege on its
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java
index 6d9d03e93..5190cab6b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Gets the metastore assignment for a workspace */
@Generated
public class GetAccountMetastoreAssignmentRequest {
/** Workspace ID. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java
index e4aa3900a..1fd3ffdb4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a metastore */
@Generated
public class GetAccountMetastoreRequest {
/** Unity Catalog metastore ID */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java
index 47d0fd89e..29c20c2fe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Gets the named storage credential */
@Generated
public class GetAccountStorageCredentialRequest {
/** Unity Catalog metastore ID */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java
index 0a910bdc7..badfea63a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get an artifact allowlist */
@Generated
public class GetArtifactAllowlistRequest {
/** The artifact type of the allowlist. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
index f0fad5bb6..b35492f54 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get securable workspace bindings */
@Generated
public class GetBindingsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java
index 9f967bcb0..2db5d46d1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get Model Version By Alias */
@Generated
public class GetByAliasRequest {
/** The name of the alias */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java
index 82fe3ec30..75a5c6596 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a catalog */
@Generated
public class GetCatalogRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java
index 153f54518..0214b47a4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a connection */
@Generated
public class GetConnectionRequest {
/** Name of the connection. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java
index cfb1de4fe..fbba91cf2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a credential */
@Generated
public class GetCredentialRequest {
/** Name of the credential. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java
index 570aaa00a..01b5ed191 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get effective permissions */
@Generated
public class GetEffectiveRequest {
/** Full name of securable. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java
index 8ac782a05..5158a373c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get an external location */
@Generated
public class GetExternalLocationRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java
index 13d1d7bfd..b635d3ab3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a function */
@Generated
public class GetFunctionRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
index 8dfca3ffc..59e2565c2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get permissions */
@Generated
public class GetGrantRequest {
/** Full name of securable. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java
index 7dde48bd5..95ba55160 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a metastore */
@Generated
public class GetMetastoreRequest {
/** Unique ID of the metastore. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java
index 44e63c4ae..a591c72ad 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a Model Version */
@Generated
public class GetModelVersionRequest {
/** The three-level (fully qualified) name of the model version */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java
index 757e31dc6..53f515c61 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get an Online Table */
@Generated
public class GetOnlineTableRequest {
/** Full three-part (catalog, schema, table) name of the table. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java
index 66a002053..36ab111f9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a table monitor */
@Generated
public class GetQualityMonitorRequest {
/** Full name of the table. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java
index 1b577aedf..5f8a02fa3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get information for a single resource quota. */
@Generated
public class GetQuotaRequest {
/** Full name of the parent resource. Provide the metastore ID if the parent is a metastore. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java
index ef0b7ac1b..1e4a8c5ee 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get refresh */
@Generated
public class GetRefreshRequest {
/** ID of the refresh. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java
index 7ad6ffcdf..cdca47e99 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a Registered Model */
@Generated
public class GetRegisteredModelRequest {
/** The three-level (fully qualified) name of the registered model */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java
index 11123dd34..02d0517e8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a schema */
@Generated
public class GetSchemaRequest {
/** Full name of the schema. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java
index 6cf81d031..e2c22ef20 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a credential */
@Generated
public class GetStorageCredentialRequest {
/** Name of the storage credential. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
index 19d8e5f0e..fea768af7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a table */
@Generated
public class GetTableRequest {
/** Full name of the table. */
@@ -16,7 +15,7 @@ public class GetTableRequest {
/**
* Whether to include tables in the response for which the principal can only access selective
- * metadata for
+ * metadata for.
*/
@JsonIgnore
@QueryParam("include_browse")
@@ -27,7 +26,7 @@ public class GetTableRequest {
@QueryParam("include_delta_metadata")
private Boolean includeDeltaMetadata;
- /** Whether to include a manifest containing capabilities the table has. */
+ /** Whether to include a manifest containing table capabilities in the response. */
@JsonIgnore
@QueryParam("include_manifest_capabilities")
private Boolean includeManifestCapabilities;
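A usage sketch of the two documented query flags (the table name is a placeholder; assumed usage, not part of this change):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.GetTableRequest;
import com.databricks.sdk.service.catalog.TableInfo;

public class GetTableExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    TableInfo table =
        w.tables()
            .get(
                new GetTableRequest()
                    .setFullName("main.default.my_table")
                    // Also return the table when only selective metadata is accessible.
                    .setIncludeBrowse(true)
                    // Ask for the table-capabilities manifest in the response.
                    .setIncludeManifestCapabilities(true));
    System.out.println(table.getFullName());
  }
}
```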
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java
index f5e09dfe9..192eead03 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get catalog workspace bindings */
@Generated
public class GetWorkspaceBindingRequest {
/** The name of the catalog. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java
index 84982e379..59f887de6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java
@@ -37,11 +37,7 @@ public GetPermissionsResponse get(String securableType, String fullName) {
return get(new GetGrantRequest().setSecurableType(securableType).setFullName(fullName));
}
- /**
- * Get permissions.
- *
- * Gets the permissions for a securable. Does not include inherited permissions.
- */
+ /** Gets the permissions for a securable. Does not include inherited permissions. */
public GetPermissionsResponse get(GetGrantRequest request) {
return impl.get(request);
}
@@ -52,10 +48,8 @@ public EffectivePermissionsList getEffective(String securableType, String fullName) {
}
/**
- * Get effective permissions.
- *
- * Gets the effective permissions for a securable. Includes inherited permissions from any
- * parent securables.
+ * Gets the effective permissions for a securable. Includes inherited permissions from any parent
+ * securables.
*/
public EffectivePermissionsList getEffective(GetEffectiveRequest request) {
return impl.getEffective(request);
@@ -65,11 +59,7 @@ public UpdatePermissionsResponse update(String securableType, String fullName) {
return update(new UpdatePermissions().setSecurableType(securableType).setFullName(fullName));
}
- /**
- * Update permissions.
- *
- * Updates the permissions for a securable.
- */
+ /** Updates the permissions for a securable. */
public UpdatePermissionsResponse update(UpdatePermissions request) {
return impl.update(request);
}
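A short sketch contrasting direct and effective permission reads (securable type and name are placeholders; assumed usage, not part of the change):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.EffectivePermissionsList;
import com.databricks.sdk.service.catalog.GetPermissionsResponse;

public class GrantsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Direct grants only; inherited permissions are not included.
    GetPermissionsResponse direct = w.grants().get("table", "main.default.my_table");

    // Effective permissions, including grants inherited from parent securables.
    EffectivePermissionsList effective = w.grants().getEffective("table", "main.default.my_table");

    System.out.println(direct);
    System.out.println(effective);
  }
}
```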
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java
index 275b60772..777f9040f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java
@@ -20,25 +20,15 @@
*/
@Generated
public interface GrantsService {
- /**
- * Get permissions.
- *
- * Gets the permissions for a securable. Does not include inherited permissions.
- */
+ /** Gets the permissions for a securable. Does not include inherited permissions. */
GetPermissionsResponse get(GetGrantRequest getGrantRequest);
/**
- * Get effective permissions.
- *
- * Gets the effective permissions for a securable. Includes inherited permissions from any
- * parent securables.
+ * Gets the effective permissions for a securable. Includes inherited permissions from any parent
+ * securables.
*/
EffectivePermissionsList getEffective(GetEffectiveRequest getEffectiveRequest);
- /**
- * Update permissions.
- *
- * Updates the permissions for a securable.
- */
+ /** Updates the permissions for a securable. */
UpdatePermissionsResponse update(UpdatePermissions updatePermissions);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java
index fad85ca91..b7f8bcd64 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get all workspaces assigned to a metastore */
@Generated
public class ListAccountMetastoreAssignmentsRequest {
/** Unity Catalog metastore ID */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java
index b38c46a54..6e21b9d0f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get all storage credentials assigned to a metastore */
@Generated
public class ListAccountStorageCredentialsRequest {
/** Unity Catalog metastore ID */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java
index cb6be8a63..05fe12886 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List catalogs */
@Generated
public class ListCatalogsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java
index 2627b4aef..4aae35586 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List connections */
@Generated
public class ListConnectionsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java
index 775a697e7..32dfc1888 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List credentials */
@Generated
public class ListCredentialsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
index 35c71f7a1..ce3805d49 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List external locations */
@Generated
public class ListExternalLocationsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
index 039752d25..4e7cf6ea1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List functions */
@Generated
public class ListFunctionsRequest {
/** Name of parent catalog for functions of interest. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java
index cc722491d..94e5514e1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List metastores */
@Generated
public class ListMetastoresRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
index 6364b1653..5c7f3ccf0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List Model Versions */
@Generated
public class ListModelVersionsRequest {
/** The full three-level name of the registered model under which to list model versions */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java
index 0fa5831ae..55d224eac 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListQuotasRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List all resource quotas under a metastore. */
@Generated
public class ListQuotasRequest {
/** The number of quotas to return. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java
index b45a0442c..51c0d0474 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List refreshes */
@Generated
public class ListRefreshesRequest {
/** Full name of the table. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java
index f319d1ed7..1d5411f69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List Registered Models */
@Generated
public class ListRegisteredModelsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
index 965bcf41d..5fe266596 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List schemas */
@Generated
public class ListSchemasRequest {
/** Parent catalog for schemas of interest. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java
index 75755ef16..c91be8012 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List credentials */
@Generated
public class ListStorageCredentialsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
index 0aa5904fb..774820505 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List table summaries */
@Generated
public class ListSummariesRequest {
/** Name of parent catalog for tables of interest. */
@@ -16,7 +15,7 @@ public class ListSummariesRequest {
@QueryParam("catalog_name")
private String catalogName;
- /** Whether to include a manifest containing capabilities the table has. */
+ /** Whether to include a manifest containing table capabilities in the response. */
@JsonIgnore
@QueryParam("include_manifest_capabilities")
private Boolean includeManifestCapabilities;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java
index b48792337..bba8bfa7c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List system schemas */
@Generated
public class ListSystemSchemasRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
index f5d9fd2f8..f63d165d6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List tables */
@Generated
public class ListTablesRequest {
/** Name of parent catalog for tables of interest. */
@@ -18,18 +17,13 @@ public class ListTablesRequest {
/**
* Whether to include tables in the response for which the principal can only access selective
- * metadata for
+ * metadata for.
*/
@JsonIgnore
@QueryParam("include_browse")
private Boolean includeBrowse;
- /** Whether delta metadata should be included in the response. */
- @JsonIgnore
- @QueryParam("include_delta_metadata")
- private Boolean includeDeltaMetadata;
-
- /** Whether to include a manifest containing capabilities the table has. */
+ /** Whether to include a manifest containing table capabilities in the response. */
@JsonIgnore
@QueryParam("include_manifest_capabilities")
private Boolean includeManifestCapabilities;
@@ -90,15 +84,6 @@ public Boolean getIncludeBrowse() {
return includeBrowse;
}
- public ListTablesRequest setIncludeDeltaMetadata(Boolean includeDeltaMetadata) {
- this.includeDeltaMetadata = includeDeltaMetadata;
- return this;
- }
-
- public Boolean getIncludeDeltaMetadata() {
- return includeDeltaMetadata;
- }
-
public ListTablesRequest setIncludeManifestCapabilities(Boolean includeManifestCapabilities) {
this.includeManifestCapabilities = includeManifestCapabilities;
return this;
@@ -169,7 +154,6 @@ public boolean equals(Object o) {
ListTablesRequest that = (ListTablesRequest) o;
return Objects.equals(catalogName, that.catalogName)
&& Objects.equals(includeBrowse, that.includeBrowse)
- && Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata)
&& Objects.equals(includeManifestCapabilities, that.includeManifestCapabilities)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(omitColumns, that.omitColumns)
@@ -184,7 +168,6 @@ public int hashCode() {
return Objects.hash(
catalogName,
includeBrowse,
- includeDeltaMetadata,
includeManifestCapabilities,
maxResults,
omitColumns,
@@ -199,7 +182,6 @@ public String toString() {
return new ToStringer(ListTablesRequest.class)
.add("catalogName", catalogName)
.add("includeBrowse", includeBrowse)
- .add("includeDeltaMetadata", includeDeltaMetadata)
.add("includeManifestCapabilities", includeManifestCapabilities)
.add("maxResults", maxResults)
.add("omitColumns", omitColumns)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java
index f7126ee0e..5c2502a3a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List Volumes */
@Generated
public class ListVolumesRequest {
/** The identifier of the catalog */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
index f468b89cc..3a1bb115e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
@@ -45,11 +45,9 @@ public void assign(long workspaceId, String metastoreId, String defaultCatalogNa
}
/**
- * Create an assignment.
- *
- * Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists,
- * it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller
- * must be an account admin.
+ * Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it
+ * will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must
+ * be an account admin.
*/
public void assign(CreateMetastoreAssignment request) {
impl.assign(request);
@@ -60,10 +58,8 @@ public MetastoreInfo create(String name) {
}
/**
- * Create a metastore.
- *
- * Creates a new metastore based on a provided name and optional storage root path. By default
- * (if the __owner__ field is not set), the owner of the new metastore is the user calling the
+ * Creates a new metastore based on a provided name and optional storage root path. By default (if
+ * the __owner__ field is not set), the owner of the new metastore is the user calling the
* __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the
* ownership is assigned to the System User instead.
*/
@@ -71,11 +67,7 @@ public MetastoreInfo create(CreateMetastore request) {
return impl.create(request);
}
- /**
- * Get metastore assignment for workspace.
- *
- * Gets the metastore assignment for the workspace being accessed.
- */
+ /** Gets the metastore assignment for the workspace being accessed. */
public MetastoreAssignment current() {
return impl.current();
}
@@ -84,11 +76,7 @@ public void delete(String id) {
delete(new DeleteMetastoreRequest().setId(id));
}
- /**
- * Delete a metastore.
- *
- * Deletes a metastore. The caller must be a metastore admin.
- */
+ /** Deletes a metastore. The caller must be a metastore admin. */
public void delete(DeleteMetastoreRequest request) {
impl.delete(request);
}
@@ -98,20 +86,16 @@ public MetastoreInfo get(String id) {
}
/**
- * Get a metastore.
- *
- * Gets a metastore that matches the supplied ID. The caller must be a metastore admin to
- * retrieve this info.
+ * Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve
+ * this info.
*/
public MetastoreInfo get(GetMetastoreRequest request) {
return impl.get(request);
}
/**
- * List metastores.
- *
- * Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be
- * an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in
+ * Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an
+ * admin to retrieve this info. There is no guarantee of a specific ordering of the elements in
* the array.
*/
public Iterable<MetastoreInfo> list(ListMetastoresRequest request) {
/**
- * Get a metastore summary.
- *
- * Gets information about a metastore. This summary includes the storage credential, the cloud
+ * Gets information about a metastore. This summary includes the storage credential, the cloud
* vendor, the cloud region, and the global metastore ID.
*/
public GetMetastoreSummaryResponse summary() {
@@ -142,11 +124,7 @@ public void unassign(long workspaceId, String metastoreId) {
unassign(new UnassignRequest().setWorkspaceId(workspaceId).setMetastoreId(metastoreId));
}
- /**
- * Delete an assignment.
- *
- * Deletes a metastore assignment. The caller must be an account administrator.
- */
+ /** Deletes a metastore assignment. The caller must be an account administrator. */
public void unassign(UnassignRequest request) {
impl.unassign(request);
}
@@ -156,9 +134,7 @@ public MetastoreInfo update(String id) {
}
/**
- * Update a metastore.
- *
- * Updates information for a specific metastore. The caller must be a metastore admin. If the
+ * Updates information for a specific metastore. The caller must be a metastore admin. If the
* __owner__ field is set to the empty string (**""**), the ownership is updated to the System
* User.
*/
@@ -171,9 +147,7 @@ public void updateAssignment(long workspaceId) {
}
/**
- * Update an assignment.
- *
- * Updates a metastore assignment. This operation can be used to update __metastore_id__ or
+ * Updates a metastore assignment. This operation can be used to update __metastore_id__ or
* __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a
* metastore. The caller must be an account admin to update __metastore_id__; otherwise, the
* caller can be a Workspace admin.
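Illustrative usage (not part of this patch): a minimal sketch of the metastore assignment calls touched above, assuming the SDK's usual WorkspaceClient entry point, a metastores() accessor, and the standard setters on CreateMetastoreAssignment; the workspace ID and metastore UUID are placeholders.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CreateMetastoreAssignment;
import com.databricks.sdk.service.catalog.GetMetastoreSummaryResponse;

public class MetastoreAssignmentExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Assigning overwrites any existing assignment for this workspace_id (account admin only).
    w.metastores()
        .assign(
            new CreateMetastoreAssignment()
                .setWorkspaceId(1234567890123456L)
                .setMetastoreId("<metastore-uuid>")
                .setDefaultCatalogName("main"));

    // The summary covers the storage credential, cloud vendor, region and global metastore ID.
    GetMetastoreSummaryResponse summary = w.metastores().summary();
    System.out.println(summary.getMetastoreId());
  }
}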
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java
index bfad43db3..2f6f582ca 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java
@@ -23,83 +23,57 @@
@Generated
public interface MetastoresService {
/**
- * Create an assignment.
- *
- * Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists,
- * it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller
- * must be an account admin.
+ * Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it
+ * will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must
+ * be an account admin.
*/
void assign(CreateMetastoreAssignment createMetastoreAssignment);
/**
- * Create a metastore.
- *
- * Creates a new metastore based on a provided name and optional storage root path. By default
- * (if the __owner__ field is not set), the owner of the new metastore is the user calling the
+ * Creates a new metastore based on a provided name and optional storage root path. By default (if
+ * the __owner__ field is not set), the owner of the new metastore is the user calling the
* __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the
* ownership is assigned to the System User instead.
*/
MetastoreInfo create(CreateMetastore createMetastore);
- /**
- * Get metastore assignment for workspace.
- *
- * Gets the metastore assignment for the workspace being accessed.
- */
+ /** Gets the metastore assignment for the workspace being accessed. */
MetastoreAssignment current();
- /**
- * Delete a metastore.
- *
- * Deletes a metastore. The caller must be a metastore admin.
- */
+ /** Deletes a metastore. The caller must be a metastore admin. */
void delete(DeleteMetastoreRequest deleteMetastoreRequest);
/**
- * Get a metastore.
- *
- * Gets a metastore that matches the supplied ID. The caller must be a metastore admin to
- * retrieve this info.
+ * Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve
+ * this info.
*/
MetastoreInfo get(GetMetastoreRequest getMetastoreRequest);
/**
- * List metastores.
- *
- * Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be
- * an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in
+ * Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an
+ * admin to retrieve this info. There is no guarantee of a specific ordering of the elements in
* the array.
*/
ListMetastoresResponse list(ListMetastoresRequest listMetastoresRequest);
/**
- * Get a metastore summary.
- *
- * Gets information about a metastore. This summary includes the storage credential, the cloud
+ * Gets information about a metastore. This summary includes the storage credential, the cloud
* vendor, the cloud region, and the global metastore ID.
*/
GetMetastoreSummaryResponse summary();
- /**
- * Delete an assignment.
- *
- * Deletes a metastore assignment. The caller must be an account administrator.
- */
+ /** Deletes a metastore assignment. The caller must be an account administrator. */
void unassign(UnassignRequest unassignRequest);
/**
- * Update a metastore.
- *
- * Updates information for a specific metastore. The caller must be a metastore admin. If the
+ * Updates information for a specific metastore. The caller must be a metastore admin. If the
* __owner__ field is set to the empty string (**""**), the ownership is updated to the System
* User.
*/
MetastoreInfo update(UpdateMetastore updateMetastore);
/**
- * Update an assignment.
- *
- * Updates a metastore assignment. This operation can be used to update __metastore_id__ or
+ * Updates a metastore assignment. This operation can be used to update __metastore_id__ or
* __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a
* metastore. The caller must be an account admin to update __metastore_id__; otherwise, the
* caller can be a Workspace admin.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java
index affc50e41..17978f1f7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java
@@ -36,10 +36,8 @@ public void delete(String fullName, long version) {
}
/**
- * Delete a Model Version.
- *
- * Deletes a model version from the specified registered model. Any aliases assigned to the
- * model version will also be deleted.
+ * Deletes a model version from the specified registered model. Any aliases assigned to the model
+ * version will also be deleted.
*
* The caller must be a metastore admin or an owner of the parent registered model. For the
* latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the
@@ -54,9 +52,7 @@ public ModelVersionInfo get(String fullName, long version) {
}
/**
- * Get a Model Version.
- *
- * Get a model version.
+ * Get a model version.
*
* The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on)
* the parent registered model. For the latter case, the caller must also be the owner or have the
@@ -72,9 +68,7 @@ public ModelVersionInfo getByAlias(String fullName, String alias) {
}
/**
- * Get Model Version By Alias.
- *
- * Get a model version by alias.
+ * Get a model version by alias.
*
* The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on)
* the registered model. For the latter case, the caller must also be the owner or have the
@@ -90,10 +84,8 @@ public Iterable<ModelVersionInfo> list(
}
/**
- * List Model Versions.
- *
- * List model versions. You can list model versions under a particular schema, or list all
- * model versions in the current metastore.
+ * List model versions. You can list model versions under a particular schema, or list all model
+ * versions in the current metastore.
*
* The returned models are filtered based on the privileges of the calling user. For example,
* the metastore admin is able to list all the model versions. A regular user needs to be the
@@ -124,9 +116,7 @@ public ModelVersionInfo update(String fullName, long version) {
}
/**
- * Update a Model Version.
- *
- * Updates the specified model version.
+ * Updates the specified model version.
*
* The caller must be a metastore admin or an owner of the parent registered model. For the
* latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the
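Illustrative usage (not part of this patch): a minimal sketch of resolving a model-version alias and fetching that version, using the getByAlias(fullName, alias) and get(fullName, version) overloads shown above; the model name and alias are placeholders, and the getVersion() getter on ModelVersionInfo is assumed.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ModelVersionInfo;

public class ModelVersionByAliasExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Resolve the "champion" alias of a registered model to a concrete version,
    // then fetch that same version directly by number.
    ModelVersionInfo champion =
        w.modelVersions().getByAlias("main.default.revenue_model", "champion");
    ModelVersionInfo same =
        w.modelVersions().get("main.default.revenue_model", champion.getVersion());
    System.out.println(same.getVersion());
  }
}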
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsService.java
index aeccdd832..d79761b40 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsService.java
@@ -18,10 +18,8 @@
@Generated
public interface ModelVersionsService {
/**
- * Delete a Model Version.
- *
- * Deletes a model version from the specified registered model. Any aliases assigned to the
- * model version will also be deleted.
+ * Deletes a model version from the specified registered model. Any aliases assigned to the model
+ * version will also be deleted.
*
* The caller must be a metastore admin or an owner of the parent registered model. For the
* latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the
@@ -30,9 +28,7 @@ public interface ModelVersionsService {
void delete(DeleteModelVersionRequest deleteModelVersionRequest);
/**
- * Get a Model Version.
- *
- * Get a model version.
+ * Get a model version.
*
* The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on)
* the parent registered model. For the latter case, the caller must also be the owner or have the
@@ -42,9 +38,7 @@ public interface ModelVersionsService {
ModelVersionInfo get(GetModelVersionRequest getModelVersionRequest);
/**
- * Get Model Version By Alias.
- *
- * Get a model version by alias.
+ * Get a model version by alias.
*
* The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on)
* the registered model. For the latter case, the caller must also be the owner or have the
@@ -54,10 +48,8 @@ public interface ModelVersionsService {
ModelVersionInfo getByAlias(GetByAliasRequest getByAliasRequest);
/**
- * List Model Versions.
- *
- * List model versions. You can list model versions under a particular schema, or list all
- * model versions in the current metastore.
+ * List model versions. You can list model versions under a particular schema, or list all model
+ * versions in the current metastore.
*
* The returned models are filtered based on the privileges of the calling user. For example,
* the metastore admin is able to list all the model versions. A regular user needs to be the
@@ -72,9 +64,7 @@ public interface ModelVersionsService {
ListModelVersionsResponse list(ListModelVersionsRequest listModelVersionsRequest);
/**
- * Update a Model Version.
- *
- * Updates the specified model version.
+ * Updates the specified model version.
*
* The caller must be a metastore admin or an owner of the parent registered model. For the
* latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
index 30ca3e788..e1d44eb38 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
@@ -79,11 +79,7 @@ public Wait<OnlineTable, OnlineTable>
}
- /**
- * Create an Online Table.
- *
- * Create a new Online Table.
- */
+ /** Create a new Online Table. */
public Wait<OnlineTable, OnlineTable> create(CreateOnlineTableRequest request) {
/**
- * Delete an Online Table.
- *
- * Delete an online table. Warning: This will delete all the data in the online table. If the
+ * Delete an online table. Warning: This will delete all the data in the online table. If the
* source Delta table was deleted or modified since this Online Table was created, this will lose
* the data forever!
*/
@@ -110,11 +104,7 @@ public OnlineTable get(String name) {
return get(new GetOnlineTableRequest().setName(name));
}
- /**
- * Get an Online Table.
- *
- * Get information about an existing online table and its status.
- */
+ /** Get information about an existing online table and its status. */
public OnlineTable get(GetOnlineTableRequest request) {
return impl.get(request);
}
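Illustrative usage (not part of this patch): a minimal sketch of looking up an online table with the get(name) convenience overload shown above; the table name is a placeholder and the getStatus() getter on OnlineTable is assumed.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.OnlineTable;

public class OnlineTableStatusExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Look up an existing online table and print its current status.
    // Deleting it instead would drop all of its data, as the warning above notes.
    OnlineTable table = w.onlineTables().get("main.default.my_table_online");
    System.out.println(table.getStatus());
  }
}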
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java
index e18d13cd0..7b122438f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java
@@ -12,26 +12,16 @@
*/
@Generated
public interface OnlineTablesService {
- /**
- * Create an Online Table.
- *
- * Create a new Online Table.
- */
+ /** Create a new Online Table. */
OnlineTable create(CreateOnlineTableRequest createOnlineTableRequest);
/**
- * Delete an Online Table.
- *
- * Delete an online table. Warning: This will delete all the data in the online table. If the
+ * Delete an online table. Warning: This will delete all the data in the online table. If the
* source Delta table was deleted or modified since this Online Table was created, this will lose
* the data forever!
*/
void delete(DeleteOnlineTableRequest deleteOnlineTableRequest);
- /**
- * Get an Online Table.
- *
- * Get information about an existing online table and its status.
- */
+ /** Get information about an existing online table and its status. */
OnlineTable get(GetOnlineTableRequest getOnlineTableRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpec.java
new file mode 100755
index 000000000..8fc752e5d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpec.java
@@ -0,0 +1,268 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * Spec of an allowed option on a securable kind and its attributes. This is mostly used by UI to
+ * provide user friendly hints and descriptions in order to facilitate the securable creation
+ * process.
+ */
+@Generated
+public class OptionSpec {
+ /**
+ * For drop down / radio button selections, UI will want to know the possible input values, it can
+ * also be used by other option types to limit input selections.
+ */
+ @JsonProperty("allowed_values")
+ private Collection<String> allowedValues;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsAPI.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsAPI.java
/**
- * Cancel refresh.
- *
- * Cancel an active monitor refresh for the given refresh ID.
+ * Cancel an active monitor refresh for the given refresh ID.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -60,9 +58,7 @@ public MonitorInfo create(String tableName, String assetsDir, String outputSchem
}
/**
- * Create a table monitor.
- *
- * Creates a new monitor for the specified table.
+ * Creates a new monitor for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on
* the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on
@@ -82,9 +78,7 @@ public void delete(String tableName) {
}
/**
- * Delete a table monitor.
- *
- * Deletes a monitor for the specified table.
+ * Deletes a monitor for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -105,9 +99,7 @@ public MonitorInfo get(String tableName) {
}
/**
- * Get a table monitor.
- *
- * Gets a monitor for the specified table.
+ * Gets a monitor for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema. 3. have the
@@ -127,9 +119,7 @@ public MonitorRefreshInfo getRefresh(String tableName, String refreshId) {
}
/**
- * Get refresh.
- *
- * Gets info about a specific monitor refresh using the given refresh ID.
+ * Gets info about a specific monitor refresh using the given refresh ID.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -147,9 +137,7 @@ public MonitorRefreshListResponse listRefreshes(String tableName) {
}
/**
- * List refreshes.
- *
- * Gets an array containing the history of the most recent refreshes (up to 25) for this table.
+ * Gets an array containing the history of the most recent refreshes (up to 25) for this table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -167,9 +155,7 @@ public RegenerateDashboardResponse regenerateDashboard(String tableName) {
}
/**
- * Regenerate a monitoring dashboard.
- *
- * Regenerates the monitoring dashboard for the specified table.
+ * Regenerates the monitoring dashboard for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -188,10 +174,8 @@ public MonitorRefreshInfo runRefresh(String tableName) {
}
/**
- * Queue a metric refresh for a monitor.
- *
- * Queues a metric refresh on the monitor for the specified table. The refresh will execute in
- * the background.
+ * Queues a metric refresh on the monitor for the specified table. The refresh will execute in the
+ * background.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -210,9 +194,7 @@ public MonitorInfo update(String tableName, String outputSchemaName) {
}
/**
- * Update a table monitor.
- *
- * Updates a monitor for the specified table.
+ * Updates a monitor for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
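Illustrative usage (not part of this patch): a minimal sketch of queuing and then polling a monitor refresh, using the runRefresh(tableName) and getRefresh(tableName, refreshId) overloads shown above; the table name is a placeholder, and the getRefreshId()/getState() getters on MonitorRefreshInfo are assumed.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.MonitorRefreshInfo;

public class MonitorRefreshExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String tableName = "main.default.orders";

    // Queue a metric refresh; it executes in the background.
    MonitorRefreshInfo queued = w.qualityMonitors().runRefresh(tableName);

    // Later, poll that specific refresh by its ID.
    MonitorRefreshInfo latest =
        w.qualityMonitors().getRefresh(tableName, String.valueOf(queued.getRefreshId()));
    System.out.println(latest.getState());
  }
}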
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsService.java
index 1372afb8b..b67e4d638 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsService.java
@@ -19,9 +19,7 @@
@Generated
public interface QualityMonitorsService {
/**
- * Cancel refresh.
- *
- * Cancel an active monitor refresh for the given refresh ID.
+ * Cancel an active monitor refresh for the given refresh ID.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -33,9 +31,7 @@ public interface QualityMonitorsService {
void cancelRefresh(CancelRefreshRequest cancelRefreshRequest);
/**
- * Create a table monitor.
- *
- * Creates a new monitor for the specified table.
+ * Creates a new monitor for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on
* the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on
@@ -49,9 +45,7 @@ public interface QualityMonitorsService {
MonitorInfo create(CreateMonitor createMonitor);
/**
- * Delete a table monitor.
- *
- * Deletes a monitor for the specified table.
+ * Deletes a monitor for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -66,9 +60,7 @@ public interface QualityMonitorsService {
void delete(DeleteQualityMonitorRequest deleteQualityMonitorRequest);
/**
- * Get a table monitor.
- *
- * Gets a monitor for the specified table.
+ * Gets a monitor for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema. 3. have the
@@ -82,9 +74,7 @@ public interface QualityMonitorsService {
MonitorInfo get(GetQualityMonitorRequest getQualityMonitorRequest);
/**
- * Get refresh.
- *
- * Gets info about a specific monitor refresh using the given refresh ID.
+ * Gets info about a specific monitor refresh using the given refresh ID.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -96,9 +86,7 @@ public interface QualityMonitorsService {
MonitorRefreshInfo getRefresh(GetRefreshRequest getRefreshRequest);
/**
- * List refreshes.
- *
- * Gets an array containing the history of the most recent refreshes (up to 25) for this table.
+ * Gets an array containing the history of the most recent refreshes (up to 25) for this table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -110,9 +98,7 @@ public interface QualityMonitorsService {
MonitorRefreshListResponse listRefreshes(ListRefreshesRequest listRefreshesRequest);
/**
- * Regenerate a monitoring dashboard.
- *
- * Regenerates the monitoring dashboard for the specified table.
+ * Regenerates the monitoring dashboard for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -126,10 +112,8 @@ RegenerateDashboardResponse regenerateDashboard(
RegenerateDashboardRequest regenerateDashboardRequest);
/**
- * Queue a metric refresh for a monitor.
- *
- * Queues a metric refresh on the monitor for the specified table. The refresh will execute in
- * the background.
+ * Queues a metric refresh on the monitor for the specified table. The refresh will execute in the
+ * background.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
@@ -141,9 +125,7 @@ RegenerateDashboardResponse regenerateDashboard(
MonitorRefreshInfo runRefresh(RunRefreshRequest runRefreshRequest);
/**
- * Update a table monitor.
- *
- * Updates a monitor for the specified table.
+ * Updates a monitor for the specified table.
*
* The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
* on the table's parent catalog and be an owner of the table's parent schema 3. have the
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java
index db3ff1835..671861277 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a Volume */
@Generated
public class ReadVolumeRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
index 9c17899de..22c7ba7e5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java
@@ -58,9 +58,7 @@ public RegisteredModelInfo create(String catalogName, String schemaName, String
}
/**
- * Create a Registered Model.
- *
- * Creates a new registered model in Unity Catalog.
+ * Creates a new registered model in Unity Catalog.
*
* File storage for model versions in the registered model will be located in the default
* location which is specified by the parent schema, or the parent catalog, or the Metastore.
@@ -80,9 +78,7 @@ public void delete(String fullName) {
}
/**
- * Delete a Registered Model.
- *
- * Deletes a registered model and all its model versions from the specified parent catalog and
+ * Deletes a registered model and all its model versions from the specified parent catalog and
* schema.
*
* The caller must be a metastore admin or an owner of the registered model. For the latter
@@ -98,9 +94,7 @@ public void deleteAlias(String fullName, String alias) {
}
/**
- * Delete a Registered Model Alias.
- *
- * Deletes a registered model alias.
+ * Deletes a registered model alias.
*
* The caller must be a metastore admin or an owner of the registered model. For the latter
* case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
@@ -115,9 +109,7 @@ public RegisteredModelInfo get(String fullName) {
}
/**
- * Get a Registered Model.
- *
- * Get a registered model.
+ * Get a registered model.
*
* The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on)
* the registered model. For the latter case, the caller must also be the owner or have the
@@ -129,10 +121,8 @@ public RegisteredModelInfo get(GetRegisteredModelRequest request) {
}
/**
- * List Registered Models.
- *
- * List registered models. You can list registered models under a particular schema, or list
- * all registered models in the current metastore.
+ * List registered models. You can list registered models under a particular schema, or list all
+ * registered models in the current metastore.
*
* The returned models are filtered based on the privileges of the calling user. For example,
* the metastore admin is able to list all the registered models. A regular user needs to be the
@@ -166,9 +156,7 @@ public RegisteredModelAlias setAlias(String fullName, String alias, long version
}
/**
- * Set a Registered Model Alias.
- *
- * Set an alias on the specified registered model.
+ * Set an alias on the specified registered model.
*
* The caller must be a metastore admin or an owner of the registered model. For the latter
* case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
@@ -183,9 +171,7 @@ public RegisteredModelInfo update(String fullName) {
}
/**
- * Update a Registered Model.
- *
- * Updates the specified registered model.
+ * Updates the specified registered model.
*
* The caller must be a metastore admin or an owner of the registered model. For the latter
* case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
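Illustrative usage (not part of this patch): a minimal sketch of creating a registered model and pointing an alias at one of its versions, using the create(catalogName, schemaName, name) and setAlias(fullName, alias, version) overloads shown above; the catalog, schema, model name and alias are placeholders.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.RegisteredModelInfo;

public class RegisteredModelAliasExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Create a registered model under main.default, then point the "champion"
    // alias at version 1 of that model.
    RegisteredModelInfo model = w.registeredModels().create("main", "default", "revenue_model");
    w.registeredModels().setAlias(model.getFullName(), "champion", 1L);
  }
}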
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java
index c8027d806..a03772d04 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java
@@ -36,9 +36,7 @@
@Generated
public interface RegisteredModelsService {
/**
- * Create a Registered Model.
- *
- * Creates a new registered model in Unity Catalog.
+ * Creates a new registered model in Unity Catalog.
*
* File storage for model versions in the registered model will be located in the default
* location which is specified by the parent schema, or the parent catalog, or the Metastore.
@@ -52,9 +50,7 @@ public interface RegisteredModelsService {
RegisteredModelInfo create(CreateRegisteredModelRequest createRegisteredModelRequest);
/**
- * Delete a Registered Model.
- *
- * Deletes a registered model and all its model versions from the specified parent catalog and
+ * Deletes a registered model and all its model versions from the specified parent catalog and
* schema.
*
* The caller must be a metastore admin or an owner of the registered model. For the latter
@@ -64,9 +60,7 @@ public interface RegisteredModelsService {
void delete(DeleteRegisteredModelRequest deleteRegisteredModelRequest);
/**
- * Delete a Registered Model Alias.
- *
- * Deletes a registered model alias.
+ * Deletes a registered model alias.
*
* The caller must be a metastore admin or an owner of the registered model. For the latter
* case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
@@ -75,9 +69,7 @@ public interface RegisteredModelsService {
void deleteAlias(DeleteAliasRequest deleteAliasRequest);
/**
- * Get a Registered Model.
- *
- * Get a registered model.
+ * Get a registered model.
*
* The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on)
* the registered model. For the latter case, the caller must also be the owner or have the
@@ -87,10 +79,8 @@ public interface RegisteredModelsService {
RegisteredModelInfo get(GetRegisteredModelRequest getRegisteredModelRequest);
/**
- * List Registered Models.
- *
- * List registered models. You can list registered models under a particular schema, or list
- * all registered models in the current metastore.
+ * List registered models. You can list registered models under a particular schema, or list all
+ * registered models in the current metastore.
*
* The returned models are filtered based on the privileges of the calling user. For example,
* the metastore admin is able to list all the registered models. A regular user needs to be the
@@ -104,9 +94,7 @@ public interface RegisteredModelsService {
ListRegisteredModelsResponse list(ListRegisteredModelsRequest listRegisteredModelsRequest);
/**
- * Set a Registered Model Alias.
- *
- * Set an alias on the specified registered model.
+ * Set an alias on the specified registered model.
*
* The caller must be a metastore admin or an owner of the registered model. For the latter
* case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
@@ -115,9 +103,7 @@ public interface RegisteredModelsService {
RegisteredModelAlias setAlias(SetRegisteredModelAliasRequest setRegisteredModelAliasRequest);
/**
- * Update a Registered Model.
- *
- * Updates the specified registered model.
+ * Updates the specified registered model.
*
* The caller must be a metastore admin or an owner of the registered model. For the latter
* case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java
index 4dd5da2db..cb47b73c9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java
@@ -43,9 +43,7 @@ public GetQuotaResponse getQuota(
}
/**
- * Get information for a single resource quota.
- *
- * The GetQuota API returns usage information for a single resource quota, defined as a
+ * The GetQuota API returns usage information for a single resource quota, defined as a
* child-parent pair. This API also refreshes the quota count if it is out of date. Refreshes are
* triggered asynchronously. The updated count might not be returned in the first call.
*/
@@ -54,10 +52,8 @@ public GetQuotaResponse getQuota(GetQuotaRequest request) {
}
/**
- * List all resource quotas under a metastore.
- *
- * ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness
- * of the counts returned. This API does not trigger a refresh of quota counts.
+ * ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of
+ * the counts returned. This API does not trigger a refresh of quota counts.
*/
public Iterable<QuotaInfo> listQuotas(ListQuotasRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasService.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasService.java
/**
- * Get information for a single resource quota.
- *
- * The GetQuota API returns usage information for a single resource quota, defined as a
+ * The GetQuota API returns usage information for a single resource quota, defined as a
* child-parent pair. This API also refreshes the quota count if it is out of date. Refreshes are
* triggered asynchronously. The updated count might not be returned in the first call.
*/
GetQuotaResponse getQuota(GetQuotaRequest getQuotaRequest);
/**
- * List all resource quotas under a metastore.
- *
- * ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness
- * of the counts returned. This API does not trigger a refresh of quota counts.
+ * ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of
+ * the counts returned. This API does not trigger a refresh of quota counts.
*/
ListQuotasResponse listQuotas(ListQuotasRequest listQuotasRequest);
}
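Illustrative usage (not part of this patch): a minimal sketch of walking resource quotas with listQuotas, assuming a resourceQuotas() accessor on WorkspaceClient and getQuotaName/getQuotaCount/getQuotaLimit getters on QuotaInfo.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ListQuotasRequest;
import com.databricks.sdk.service.catalog.QuotaInfo;

public class ListQuotasExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Walk every quota under the metastore; counts may be stale because this
    // call does not trigger a refresh.
    for (QuotaInfo quota : w.resourceQuotas().listQuotas(new ListQuotasRequest())) {
      System.out.println(
          quota.getQuotaName() + ": " + quota.getQuotaCount() + "/" + quota.getQuotaLimit());
    }
  }
}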
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java
index de17a87b5..60ab501e5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Queue a metric refresh for a monitor */
@Generated
public class RunRefreshRequest {
/** Full name of the table. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
index acaa88214..803d504c1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
@@ -34,10 +34,8 @@ public SchemaInfo create(String name, String catalogName) {
}
/**
- * Create a schema.
- *
- * Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin,
- * or have the **CREATE_SCHEMA** privilege in the parent catalog.
+ * Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or
+ * have the **CREATE_SCHEMA** privilege in the parent catalog.
*/
public SchemaInfo create(CreateSchema request) {
return impl.create(request);
@@ -48,9 +46,7 @@ public void delete(String fullName) {
}
/**
- * Delete a schema.
- *
- * Deletes the specified schema from the parent catalog. The caller must be the owner of the
+ * Deletes the specified schema from the parent catalog. The caller must be the owner of the
* schema or an owner of the parent catalog.
*/
public void delete(DeleteSchemaRequest request) {
@@ -62,10 +58,8 @@ public SchemaInfo get(String fullName) {
}
/**
- * Get a schema.
- *
- * Gets the specified schema within the metastore. The caller must be a metastore admin, the
- * owner of the schema, or a user that has the **USE_SCHEMA** privilege on the schema.
+ * Gets the specified schema within the metastore. The caller must be a metastore admin, the owner
+ * of the schema, or a user that has the **USE_SCHEMA** privilege on the schema.
*/
public SchemaInfo get(GetSchemaRequest request) {
return impl.get(request);
@@ -76,13 +70,10 @@ public Iterable<SchemaInfo> list(String catalogName) {
}
/**
- * List schemas.
- *
- * Gets an array of schemas for a catalog in the metastore. If the caller is the metastore
- * admin or the owner of the parent catalog, all schemas for the catalog will be retrieved.
- * Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA**
- * privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in
- * the array.
+ * Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin
+ * or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise,
+ * only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege)
+ * will be retrieved. There is no guarantee of a specific ordering of the elements in the array.
*/
public Iterable<SchemaInfo> list(ListSchemasRequest request) {
/**
- * Update a schema.
- *
- * Updates a schema for a catalog. The caller must be the owner of the schema or a metastore
+ * Updates a schema for a catalog. The caller must be the owner of the schema or a metastore
* admin. If the caller is a metastore admin, only the __owner__ field can be changed in the
* update. If the __name__ field must be updated, the caller must be a metastore admin or have the
* **CREATE_SCHEMA** privilege on the parent catalog.
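Illustrative usage (not part of this patch): a minimal sketch of creating and then updating a schema, using the create(name, catalogName) overload shown above; the UpdateSchema setters (setFullName, setComment) are assumed, and the catalog and schema names are placeholders.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.SchemaInfo;
import com.databricks.sdk.service.catalog.UpdateSchema;

public class SchemaLifecycleExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Requires the CREATE_SCHEMA privilege on the parent catalog (or metastore admin).
    SchemaInfo schema = w.schemas().create("staging", "main");

    // Only the schema owner or a metastore admin may update it.
    w.schemas()
        .update(new UpdateSchema().setFullName(schema.getFullName()).setComment("Staging area"));
  }
}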
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
index e18efa0d8..816ed12fa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
@@ -16,44 +16,33 @@
@Generated
public interface SchemasService {
/**
- * Create a schema.
- *
- * Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin,
- * or have the **CREATE_SCHEMA** privilege in the parent catalog.
+ * Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or
+ * have the **CREATE_SCHEMA** privilege in the parent catalog.
*/
SchemaInfo create(CreateSchema createSchema);
/**
- * Delete a schema.
- *
- * Deletes the specified schema from the parent catalog. The caller must be the owner of the
+ * Deletes the specified schema from the parent catalog. The caller must be the owner of the
* schema or an owner of the parent catalog.
*/
void delete(DeleteSchemaRequest deleteSchemaRequest);
/**
- * Get a schema.
- *
- * Gets the specified schema within the metastore. The caller must be a metastore admin, the
- * owner of the schema, or a user that has the **USE_SCHEMA** privilege on the schema.
+ * Gets the specified schema within the metastore. The caller must be a metastore admin, the owner
+ * of the schema, or a user that has the **USE_SCHEMA** privilege on the schema.
*/
SchemaInfo get(GetSchemaRequest getSchemaRequest);
/**
- * List schemas.
- *
- * Gets an array of schemas for a catalog in the metastore. If the caller is the metastore
- * admin or the owner of the parent catalog, all schemas for the catalog will be retrieved.
- * Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA**
- * privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in
- * the array.
+ * Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin
+ * or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise,
+ * only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege)
+ * will be retrieved. There is no guarantee of a specific ordering of the elements in the array.
*/
ListSchemasResponse list(ListSchemasRequest listSchemasRequest);
/**
- * Update a schema.
- *
- * Updates a schema for a catalog. The caller must be the owner of the schema or a metastore
+ * Updates a schema for a catalog. The caller must be the owner of the schema or a metastore
* admin. If the caller is a metastore admin, only the __owner__ field can be changed in the
* update. If the __name__ field must be updated, the caller must be a metastore admin or have the
* **CREATE_SCHEMA** privilege on the parent catalog.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
new file mode 100755
index 000000000..d4bdc219e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum SecurableKind {
+ TABLE_DB_STORAGE,
+ TABLE_DELTA,
+ TABLE_DELTASHARING,
+ TABLE_DELTASHARING_MUTABLE,
+ TABLE_DELTA_EXTERNAL,
+ TABLE_DELTA_ICEBERG_MANAGED,
+ TABLE_DELTA_UNIFORM_HUDI_EXTERNAL,
+ TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL,
+ TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL,
+ TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED,
+ TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE,
+ TABLE_EXTERNAL,
+ TABLE_FEATURE_STORE,
+ TABLE_FEATURE_STORE_EXTERNAL,
+ TABLE_FOREIGN_BIGQUERY,
+ TABLE_FOREIGN_DATABRICKS,
+ TABLE_FOREIGN_DELTASHARING,
+ TABLE_FOREIGN_HIVE_METASTORE,
+ TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL,
+ TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED,
+ TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL,
+ TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED,
+ TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW,
+ TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL,
+ TABLE_FOREIGN_HIVE_METASTORE_MANAGED,
+ TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL,
+ TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED,
+ TABLE_FOREIGN_HIVE_METASTORE_VIEW,
+ TABLE_FOREIGN_MONGODB,
+ TABLE_FOREIGN_MYSQL,
+ TABLE_FOREIGN_NETSUITE,
+ TABLE_FOREIGN_ORACLE,
+ TABLE_FOREIGN_POSTGRESQL,
+ TABLE_FOREIGN_REDSHIFT,
+ TABLE_FOREIGN_SALESFORCE,
+ TABLE_FOREIGN_SALESFORCE_DATA_CLOUD,
+ TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING,
+ TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW,
+ TABLE_FOREIGN_SNOWFLAKE,
+ TABLE_FOREIGN_SQLDW,
+ TABLE_FOREIGN_SQLSERVER,
+ TABLE_FOREIGN_TERADATA,
+ TABLE_FOREIGN_WORKDAY_RAAS,
+ TABLE_ICEBERG_UNIFORM_MANAGED,
+ TABLE_INTERNAL,
+ TABLE_MANAGED_POSTGRESQL,
+ TABLE_MATERIALIZED_VIEW,
+ TABLE_MATERIALIZED_VIEW_DELTASHARING,
+ TABLE_METRIC_VIEW,
+ TABLE_ONLINE_VECTOR_INDEX_DIRECT,
+ TABLE_ONLINE_VECTOR_INDEX_REPLICA,
+ TABLE_ONLINE_VIEW,
+ TABLE_STANDARD,
+ TABLE_STREAMING_LIVE_TABLE,
+ TABLE_STREAMING_LIVE_TABLE_DELTASHARING,
+ TABLE_SYSTEM,
+ TABLE_SYSTEM_DELTASHARING,
+ TABLE_VIEW,
+ TABLE_VIEW_DELTASHARING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java
new file mode 100755
index 000000000..450be090d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java
@@ -0,0 +1,143 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Manifest of a specific securable kind. */
+@Generated
+public class SecurableKindManifest {
+ /** A list of allowed option names, consistent with the 'options' field. */
+ @JsonProperty("allowedOptions")
+ private Collection<String> allowedOptions;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
/**
- * Create a storage credential.
+ * Creates a new storage credential.
*
- * Creates a new storage credential.
+ * The caller must be a metastore admin or have the **CREATE_STORAGE_CREDENTIAL** privilege on
+ * the metastore.
*/
public StorageCredentialInfo create(CreateStorageCredential request) {
return impl.create(request);
@@ -54,9 +55,7 @@ public void delete(String name) {
}
/**
- * Delete a credential.
- *
- * Deletes a storage credential from the metastore. The caller must be an owner of the storage
+ * Deletes a storage credential from the metastore. The caller must be an owner of the storage
* credential.
*/
public void delete(DeleteStorageCredentialRequest request) {
@@ -68,28 +67,20 @@ public StorageCredentialInfo get(String name) {
}
/**
- * Get a credential.
- *
- * Gets a storage credential from the metastore. The caller must be a metastore admin, the
- * owner of the storage credential, or have some permission on the storage credential.
+ * Gets a storage credential from the metastore. The caller must be a metastore admin, the owner
+ * of the storage credential, or have some permission on the storage credential.
*/
public StorageCredentialInfo get(GetStorageCredentialRequest request) {
return impl.get(request);
}
/**
- * List credentials.
- *
- * Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is
+ * Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is
* limited to only those storage credentials the caller has permission to access. If the caller is
* a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a
* specific ordering of the elements in the array.
*/
public Iterable<StorageCredentialInfo> list(ListStorageCredentialsRequest request) {
/**
- * Update a credential.
+ * Updates a storage credential on the metastore.
*
- * Updates a storage credential on the metastore.
+ * The caller must be the owner of the storage credential or a metastore admin. If the caller
+ * is a metastore admin, only the **owner** field can be changed.
*/
public StorageCredentialInfo update(UpdateStorageCredential request) {
return impl.update(request);
}
/**
- * Validate a storage credential.
- *
- * Validates a storage credential. At least one of __external_location_name__ and __url__ need
- * to be provided. If only one of them is provided, it will be used for validation. And if both
- * are provided, the __url__ will be used for validation, and __external_location_name__ will be
+ * Validates a storage credential. At least one of __external_location_name__ and __url__ need to
+ * be provided. If only one of them is provided, it will be used for validation. And if both are
+ * provided, the __url__ will be used for validation, and __external_location_name__ will be
* ignored when checking overlapping urls.
*
* Either the __storage_credential_name__ or the cloud-specific credential must be provided.
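Illustrative usage (not part of this patch): a minimal sketch of validating a storage credential against a URL, as described above; the ValidateStorageCredential request type, its setters, and the validate() method name are assumptions based on the SDK's usual naming, and the credential name and bucket path are placeholders.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ValidateStorageCredential;
import com.databricks.sdk.service.catalog.ValidateStorageCredentialResponse;

public class ValidateCredentialExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Provide either external_location_name or url; when both are set, url is
    // used for validation and external_location_name is ignored.
    ValidateStorageCredentialResponse result =
        w.storageCredentials()
            .validate(
                new ValidateStorageCredential()
                    .setStorageCredentialName("my_credential")
                    .setUrl("s3://my-bucket/landing/"));
    System.out.println(result.getResults());
  }
}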
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
index 23af9af76..b403f3e83 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
@@ -23,32 +23,27 @@
@Generated
public interface StorageCredentialsService {
/**
- * Create a storage credential.
+ * Creates a new storage credential.
*
- * Creates a new storage credential.
+ * The caller must be a metastore admin or have the **CREATE_STORAGE_CREDENTIAL** privilege on
+ * the metastore.
*/
StorageCredentialInfo create(CreateStorageCredential createStorageCredential);
/**
- * Delete a credential.
- *
- * Deletes a storage credential from the metastore. The caller must be an owner of the storage
+ * Deletes a storage credential from the metastore. The caller must be an owner of the storage
* credential.
*/
void delete(DeleteStorageCredentialRequest deleteStorageCredentialRequest);
/**
- * Get a credential.
- *
- * Gets a storage credential from the metastore. The caller must be a metastore admin, the
- * owner of the storage credential, or have some permission on the storage credential.
+ * Gets a storage credential from the metastore. The caller must be a metastore admin, the owner
+ * of the storage credential, or have some permission on the storage credential.
*/
StorageCredentialInfo get(GetStorageCredentialRequest getStorageCredentialRequest);
/**
- * List credentials.
- *
- * Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is
+ * Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is
* limited to only those storage credentials the caller has permission to access. If the caller is
* a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a
* specific ordering of the elements in the array.
@@ -56,18 +51,17 @@ public interface StorageCredentialsService {
ListStorageCredentialsResponse list(ListStorageCredentialsRequest listStorageCredentialsRequest);
/**
- * Update a credential.
+ * Updates a storage credential on the metastore.
*
- * Updates a storage credential on the metastore.
+ * The caller must be the owner of the storage credential or a metastore admin. If the caller
+ * is a metastore admin, only the **owner** field can be changed.
*/
StorageCredentialInfo update(UpdateStorageCredential updateStorageCredential);
/**
- * Validate a storage credential.
- *
- * Validates a storage credential. At least one of __external_location_name__ and __url__ need
- * to be provided. If only one of them is provided, it will be used for validation. And if both
- * are provided, the __url__ will be used for validation, and __external_location_name__ will be
+ * Validates a storage credential. At least one of __external_location_name__ and __url__ need to
+ * be provided. If only one of them is provided, it will be used for validation. And if both are
+ * provided, the __url__ will be used for validation, and __external_location_name__ will be
* ignored when checking overlapping urls.
*
* Either the __storage_credential_name__ or the cloud-specific credential must be provided.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
index 605036b9e..17fcaa1a9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
@@ -33,9 +33,7 @@ public void disable(String metastoreId, String schemaName) {
}
/**
- * Disable a system schema.
- *
- * Disables the system schema and removes it from the system catalog. The caller must be an
+ * Disables the system schema and removes it from the system catalog. The caller must be an
* account admin or a metastore admin.
*/
public void disable(DisableRequest request) {
@@ -47,9 +45,7 @@ public void enable(String metastoreId, String schemaName) {
}
/**
- * Enable a system schema.
- *
- * Enables the system schema and adds it to the system catalog. The caller must be an account
+ * Enables the system schema and adds it to the system catalog. The caller must be an account
* admin or a metastore admin.
*/
public void enable(EnableRequest request) {
@@ -61,9 +57,7 @@ public Iterable Gets an array of system schemas for a metastore. The caller must be an account admin or a
+ * Gets an array of system schemas for a metastore. The caller must be an account admin or a
* metastore admin.
*/
public Iterable Disables the system schema and removes it from the system catalog. The caller must be an
+ * Disables the system schema and removes it from the system catalog. The caller must be an
* account admin or a metastore admin.
*/
void disable(DisableRequest disableRequest);
/**
- * Enable a system schema.
- *
- * Enables the system schema and adds it to the system catalog. The caller must be an account
+ * Enables the system schema and adds it to the system catalog. The caller must be an account
* admin or a metastore admin.
*/
void enable(EnableRequest enableRequest);
/**
- * List system schemas.
- *
- * Gets an array of system schemas for a metastore. The caller must be an account admin or a
+ * Gets an array of system schemas for a metastore. The caller must be an account admin or a
* metastore admin.
*/
ListSystemSchemasResponse list(ListSystemSchemasRequest listSystemSchemasRequest);
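
For orientation, the enable/disable convenience overloads above take the metastore ID and schema name directly. A minimal sketch, assuming the `systemSchemas()` accessor on `WorkspaceClient`, a `list(String metastoreId)` convenience overload, and a hypothetical metastore ID:

```java
import com.databricks.sdk.WorkspaceClient;

public class SystemSchemasExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    String metastoreId = "11111111-2222-3333-4444-555555555555"; // hypothetical

    // Both calls require an account admin or metastore admin, per the Javadoc above.
    w.systemSchemas().enable(metastoreId, "access");

    // List what is currently enabled, then disable the schema again.
    w.systemSchemas().list(metastoreId).forEach(System.out::println);
    w.systemSchemas().disable(metastoreId, "access");
  }
}
```
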
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java
index 2368e848b..4250c5b14 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java
@@ -40,9 +40,7 @@ public TableConstraint create(String fullNameArg, TableConstraint constraint) {
}
/**
- * Create a table constraint.
- *
- * Creates a new table constraint.
+ * Creates a new table constraint.
*
* For the table constraint creation to succeed, the user must satisfy both of these
* conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog,
@@ -64,9 +62,7 @@ public void delete(String fullName, String constraintName, boolean cascade) {
}
/**
- * Delete a table constraint.
- *
- * Deletes a table constraint.
+ * Deletes a table constraint.
*
* For the table constraint deletion to succeed, the user must satisfy both of these
* conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsService.java
index ee1d7c9c6..9fb50719c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsService.java
@@ -22,9 +22,7 @@
@Generated
public interface TableConstraintsService {
/**
- * Create a table constraint.
- *
- * Creates a new table constraint.
+ * Creates a new table constraint.
*
* For the table constraint creation to succeed, the user must satisfy both of these
* conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog,
@@ -36,9 +34,7 @@ public interface TableConstraintsService {
TableConstraint create(CreateTableConstraint createTableConstraint);
/**
- * Delete a table constraint.
- *
- * Deletes a table constraint.
+ * Deletes a table constraint.
*
* For the table constraint deletion to succeed, the user must satisfy both of these
* conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java
index 94c4d8abf..02fc9066e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java
@@ -108,6 +108,10 @@ public class TableInfo {
@JsonProperty("schema_name")
private String schemaName;
+ /** SecurableKindManifest of table, including capabilities the table has. */
+ @JsonProperty("securable_kind_manifest")
+ private SecurableKindManifest securableKindManifest;
+
/** List of schemes whose objects can be referenced without qualification. */
@JsonProperty("sql_path")
private String sqlPath;
@@ -119,7 +123,7 @@ public class TableInfo {
@JsonProperty("storage_credential_name")
private String storageCredentialName;
- /** Storage root URL for table (for **MANAGED**, **EXTERNAL** tables) */
+ /** Storage root URL for table (for **MANAGED**, **EXTERNAL** tables). */
@JsonProperty("storage_location")
private String storageLocation;
@@ -362,6 +366,15 @@ public String getSchemaName() {
return schemaName;
}
+ public TableInfo setSecurableKindManifest(SecurableKindManifest securableKindManifest) {
+ this.securableKindManifest = securableKindManifest;
+ return this;
+ }
+
+ public SecurableKindManifest getSecurableKindManifest() {
+ return securableKindManifest;
+ }
+
public TableInfo setSqlPath(String sqlPath) {
this.sqlPath = sqlPath;
return this;
@@ -480,6 +493,7 @@ public boolean equals(Object o) {
&& Objects.equals(properties, that.properties)
&& Objects.equals(rowFilter, that.rowFilter)
&& Objects.equals(schemaName, that.schemaName)
+ && Objects.equals(securableKindManifest, that.securableKindManifest)
&& Objects.equals(sqlPath, that.sqlPath)
&& Objects.equals(storageCredentialName, that.storageCredentialName)
&& Objects.equals(storageLocation, that.storageLocation)
@@ -517,6 +531,7 @@ public int hashCode() {
properties,
rowFilter,
schemaName,
+ securableKindManifest,
sqlPath,
storageCredentialName,
storageLocation,
@@ -554,6 +569,7 @@ public String toString() {
.add("properties", properties)
.add("rowFilter", rowFilter)
.add("schemaName", schemaName)
+ .add("securableKindManifest", securableKindManifest)
.add("sqlPath", sqlPath)
.add("storageCredentialName", storageCredentialName)
.add("storageLocation", storageLocation)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java
index ea7c14e18..515394c42 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java
@@ -13,6 +13,10 @@ public class TableSummary {
@JsonProperty("full_name")
private String fullName;
+ /** SecurableKindManifest of table, including capabilities the table has. */
+ @JsonProperty("securable_kind_manifest")
+ private SecurableKindManifest securableKindManifest;
+
/** */
@JsonProperty("table_type")
private TableType tableType;
@@ -26,6 +30,15 @@ public String getFullName() {
return fullName;
}
+ public TableSummary setSecurableKindManifest(SecurableKindManifest securableKindManifest) {
+ this.securableKindManifest = securableKindManifest;
+ return this;
+ }
+
+ public SecurableKindManifest getSecurableKindManifest() {
+ return securableKindManifest;
+ }
+
public TableSummary setTableType(TableType tableType) {
this.tableType = tableType;
return this;
@@ -40,18 +53,21 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TableSummary that = (TableSummary) o;
- return Objects.equals(fullName, that.fullName) && Objects.equals(tableType, that.tableType);
+ return Objects.equals(fullName, that.fullName)
+ && Objects.equals(securableKindManifest, that.securableKindManifest)
+ && Objects.equals(tableType, that.tableType);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, tableType);
+ return Objects.hash(fullName, securableKindManifest, tableType);
}
@Override
public String toString() {
return new ToStringer(TableSummary.class)
.add("fullName", fullName)
+ .add("securableKindManifest", securableKindManifest)
.add("tableType", tableType)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableType.java
index f95dd9cfd..f92dc17ef 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableType.java
@@ -12,6 +12,7 @@ public enum TableType {
MANAGED,
MANAGED_SHALLOW_CLONE,
MATERIALIZED_VIEW,
+ METRIC_VIEW,
STREAMING_TABLE,
VIEW,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
index 620144b8e..d95e94e80 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
@@ -38,12 +38,10 @@ public void delete(String fullName) {
}
/**
- * Delete a table.
- *
- * Deletes a table from the specified parent catalog and schema. The caller must be the owner
- * of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the
- * owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege
- * on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+ * Deletes a table from the specified parent catalog and schema. The caller must be the owner of
+ * the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner
+ * of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on
+ * the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
*/
public void delete(DeleteTableRequest request) {
impl.delete(request);
@@ -54,9 +52,7 @@ public TableExistsResponse exists(String fullName) {
}
/**
- * Get boolean reflecting if table exists.
- *
- * Gets if a table exists in the metastore for a specific catalog and schema. The caller must
+ * Gets if a table exists in the metastore for a specific catalog and schema. The caller must
* satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent
* catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent
* catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
@@ -73,13 +69,11 @@ public TableInfo get(String fullName) {
}
/**
- * Get a table.
- *
- * Gets a table from the metastore for a specific catalog and schema. The caller must satisfy
- * one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog
- * * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog *
- * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on
- * the parent schema, and either be the table owner or have the SELECT privilege on the table.
+ * Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one
+ * of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be
+ * the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have
+ * the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the
+ * parent schema, and either be the table owner or have the SELECT privilege on the table.
*/
public TableInfo get(GetTableRequest request) {
return impl.get(request);
@@ -90,19 +84,13 @@ public Iterable Gets an array of all tables for the current metastore under the parent catalog and schema.
- * The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the
+ * Gets an array of all tables for the current metastore under the parent catalog and schema. The
+ * caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the
* table. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
* privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is
* no guarantee of a specific ordering of the elements in the array.
*/
public Iterable Gets an array of summaries for tables for a schema and catalog within the metastore. The
- * table summaries returned are either:
+ * Gets an array of summaries for tables for a schema and catalog within the metastore. The table
+ * summaries returned are either:
*
* * summaries for tables (within the current metastore and parent catalog and schema), when
* the user is a metastore admin, or: * summaries for tables and schemas (within the current
@@ -135,10 +121,6 @@ public Iterable There is no guarantee of a specific ordering of the elements in the array.
*/
public Iterable Change the owner of the table. The caller must be the owner of the parent catalog, have the
+ * Change the owner of the table. The caller must be the owner of the parent catalog, have the
* **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be
* the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the
* **USE_SCHEMA** privilege on the parent schema.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
index 54c506478..1b993a709 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
@@ -20,19 +20,15 @@
@Generated
public interface TablesService {
/**
- * Delete a table.
- *
- * Deletes a table from the specified parent catalog and schema. The caller must be the owner
- * of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the
- * owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege
- * on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+ * Deletes a table from the specified parent catalog and schema. The caller must be the owner of
+ * the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner
+ * of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on
+ * the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
*/
void delete(DeleteTableRequest deleteTableRequest);
/**
- * Get boolean reflecting if table exists.
- *
- * Gets if a table exists in the metastore for a specific catalog and schema. The caller must
+ * Gets if a table exists in the metastore for a specific catalog and schema. The caller must
* satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent
* catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent
* catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
@@ -43,21 +39,17 @@ public interface TablesService {
TableExistsResponse exists(ExistsRequest existsRequest);
/**
- * Get a table.
- *
- * Gets a table from the metastore for a specific catalog and schema. The caller must satisfy
- * one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog
- * * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog *
- * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on
- * the parent schema, and either be the table owner or have the SELECT privilege on the table.
+ * Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one
+ * of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be
+ * the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have
+ * the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the
+ * parent schema, and either be the table owner or have the SELECT privilege on the table.
*/
TableInfo get(GetTableRequest getTableRequest);
/**
- * List tables.
- *
- * Gets an array of all tables for the current metastore under the parent catalog and schema.
- * The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the
+ * Gets an array of all tables for the current metastore under the parent catalog and schema. The
+ * caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the
* table. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
* privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is
* no guarantee of a specific ordering of the elements in the array.
@@ -65,10 +57,8 @@ public interface TablesService {
ListTablesResponse list(ListTablesRequest listTablesRequest);
/**
- * List table summaries.
- *
- * Gets an array of summaries for tables for a schema and catalog within the metastore. The
- * table summaries returned are either:
+ * Gets an array of summaries for tables for a schema and catalog within the metastore. The table
+ * summaries returned are either:
*
* * summaries for tables (within the current metastore and parent catalog and schema), when
* the user is a metastore admin, or: * summaries for tables and schemas (within the current
@@ -81,9 +71,7 @@ public interface TablesService {
ListTableSummariesResponse listSummaries(ListSummariesRequest listSummariesRequest);
/**
- * Update a table owner.
- *
- * Change the owner of the table. The caller must be the owner of the parent catalog, have the
+ * Change the owner of the table. The caller must be the owner of the parent catalog, have the
* **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be
* the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the
* **USE_SCHEMA** privilege on the parent schema.
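
Since `listSummaries` returns different scopes for metastore admins and regular users, a short usage sketch may help; the `ListSummariesRequest` setter names below are assumed from its snake_case fields (catalog_name, schema_name_pattern) and are not part of this change.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ListSummariesRequest;
import com.databricks.sdk.service.catalog.TableSummary;

public class ListTableSummariesExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Summaries are scoped to a catalog; the schema name pattern narrows the result.
    Iterable<TableSummary> summaries =
        w.tables()
            .listSummaries(
                new ListSummariesRequest()
                    .setCatalogName("main")
                    .setSchemaNamePattern("sales_%"));

    for (TableSummary s : summaries) {
      // fullName and tableType are existing TableSummary fields; ordering is not guaranteed.
      System.out.println(s.getFullName() + " -> " + s.getTableType());
    }
  }
}
```
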
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
index dd496a463..900b1179b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
@@ -38,9 +38,7 @@ public TemporaryTableCredentialsAPI(TemporaryTableCredentialsService mock) {
}
/**
- * Generate a temporary table credential.
- *
- * Get a short-lived credential for directly accessing the table data on cloud storage. The
+ * Get a short-lived credential for directly accessing the table data on cloud storage. The
* metastore must have external_access_enabled flag set to true (default false). The caller must
* have EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted
* by catalog owners.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
index 881cbd01e..d8cc3e5bf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
@@ -25,9 +25,7 @@
@Generated
public interface TemporaryTableCredentialsService {
/**
- * Generate a temporary table credential.
- *
- * Get a short-lived credential for directly accessing the table data on cloud storage. The
+ * Get a short-lived credential for directly accessing the table data on cloud storage. The
* metastore must have external_access_enabled flag set to true (default false). The caller must
* have EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted
* by catalog owners.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java
index 29c6aac69..ee40d9e87 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete an assignment */
@Generated
public class UnassignRequest {
/** Query for the ID of the metastore to delete. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java
index 6f91812cf..bba0e810a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java
@@ -10,7 +10,7 @@
@Generated
public class UpdateCredentialRequest {
- /** The AWS IAM role configuration */
+ /** The AWS IAM role configuration. */
@JsonProperty("aws_iam_role")
private AwsIamRole awsIamRole;
@@ -18,7 +18,7 @@ public class UpdateCredentialRequest {
@JsonProperty("azure_managed_identity")
private AzureManagedIdentity azureManagedIdentity;
- /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */
+ /** The Azure service principal configuration. */
@JsonProperty("azure_service_principal")
private AzureServicePrincipal azureServicePrincipal;
@@ -26,7 +26,7 @@ public class UpdateCredentialRequest {
@JsonProperty("comment")
private String comment;
- /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */
+ /** The Databricks managed GCP service account configuration. */
@JsonProperty("databricks_gcp_service_account")
private DatabricksGcpServiceAccount databricksGcpServiceAccount;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java
index 504151504..4829c49ae 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java
@@ -38,7 +38,10 @@ public class UpdateStorageCredential {
@JsonProperty("force")
private Boolean force;
- /** */
+ /**
+ * Whether the current securable is accessible from all workspaces or a specific set of
+ * workspaces.
+ */
@JsonProperty("isolation_mode")
private IsolationMode isolationMode;
@@ -53,7 +56,10 @@ public class UpdateStorageCredential {
@JsonProperty("owner")
private String owner;
- /** Whether the storage credential is only usable for read operations. */
+ /**
+ * Whether the credential is usable only for read operations. Only applicable when purpose is
+ * **STORAGE**.
+ */
@JsonProperty("read_only")
private Boolean readOnly;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java
index 5f8a08040..f58635082 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java
@@ -8,13 +8,12 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Update a table owner. */
@Generated
public class UpdateTableRequest {
/** Full name of the table. */
@JsonIgnore private String fullName;
- /** */
+ /** Username of current owner of table. */
@JsonProperty("owner")
private String owner;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java
index 23fb6866a..b5267c325 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java
@@ -37,7 +37,7 @@ public class ValidateStorageCredential {
@JsonProperty("read_only")
private Boolean readOnly;
- /** The name of the storage credential to validate. */
+ /** Required. The name of an existing credential or long-lived cloud credential to validate. */
@JsonProperty("storage_credential_name")
private String storageCredentialName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java
index d0a625941..d1541f52d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java
@@ -4,7 +4,10 @@
import com.databricks.sdk.support.Generated;
-/** The operation tested. */
+/**
+ * An enum representing the file operation performed on the external location with the storage
+ * credential.
+ */
@Generated
public enum ValidationResultOperation {
DELETE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultResult.java
index 5e3da01b3..66221c99a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultResult.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultResult.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** The results of the tested operation. */
+/** An enum representing the result of the file operation. */
@Generated
public enum ValidationResultResult {
FAIL,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
index ea0dffa63..877baa8c0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java
@@ -43,9 +43,7 @@ public VolumeInfo create(
}
/**
- * Create a Volume.
- *
- * Creates a new volume.
+ * Creates a new volume.
*
* The user could create either an external volume or a managed volume. An external volume will
* be created in the specified external location, while a managed volume will be located in the
@@ -71,9 +69,7 @@ public void delete(String name) {
}
/**
- * Delete a Volume.
- *
- * Deletes a volume from the specified parent catalog and schema.
+ * Deletes a volume from the specified parent catalog and schema.
*
* The caller must be a metastore admin or an owner of the volume. For the latter case, the
* caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and
@@ -88,9 +84,7 @@ public Iterable Gets an array of volumes for the current metastore under the parent catalog and schema.
+ * Gets an array of volumes for the current metastore under the parent catalog and schema.
*
* The returned volumes are filtered based on the privileges of the calling user. For example,
* the metastore admin is able to list all the volumes. A regular user needs to be the owner or
@@ -119,9 +113,7 @@ public VolumeInfo read(String name) {
}
/**
- * Get a Volume.
- *
- * Gets a volume from the metastore for a specific catalog and schema.
+ * Gets a volume from the metastore for a specific catalog and schema.
*
* The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege
* on) the volume. For the latter case, the caller must also be the owner or have the
@@ -137,9 +129,7 @@ public VolumeInfo update(String name) {
}
/**
- * Update a Volume.
- *
- * Updates the specified volume under the specified parent catalog and schema.
+ * Updates the specified volume under the specified parent catalog and schema.
*
* The caller must be a metastore admin or an owner of the volume. For the latter case, the
* caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java
index d28782d55..fe725c7ef 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java
@@ -19,9 +19,7 @@
@Generated
public interface VolumesService {
/**
- * Create a Volume.
- *
- * Creates a new volume.
+ * Creates a new volume.
*
* The user could create either an external volume or a managed volume. An external volume will
* be created in the specified external location, while a managed volume will be located in the
@@ -41,9 +39,7 @@ public interface VolumesService {
VolumeInfo create(CreateVolumeRequestContent createVolumeRequestContent);
/**
- * Delete a Volume.
- *
- * Deletes a volume from the specified parent catalog and schema.
+ * Deletes a volume from the specified parent catalog and schema.
*
* The caller must be a metastore admin or an owner of the volume. For the latter case, the
* caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and
@@ -52,9 +48,7 @@ public interface VolumesService {
void delete(DeleteVolumeRequest deleteVolumeRequest);
/**
- * List Volumes.
- *
- * Gets an array of volumes for the current metastore under the parent catalog and schema.
+ * Gets an array of volumes for the current metastore under the parent catalog and schema.
*
* The returned volumes are filtered based on the privileges of the calling user. For example,
* the metastore admin is able to list all the volumes. A regular user needs to be the owner or
@@ -67,9 +61,7 @@ public interface VolumesService {
ListVolumesResponseContent list(ListVolumesRequest listVolumesRequest);
/**
- * Get a Volume.
- *
- * Gets a volume from the metastore for a specific catalog and schema.
+ * Gets a volume from the metastore for a specific catalog and schema.
*
* The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege
* on) the volume. For the latter case, the caller must also be the owner or have the
@@ -79,9 +71,7 @@ public interface VolumesService {
VolumeInfo read(ReadVolumeRequest readVolumeRequest);
/**
- * Update a Volume.
- *
- * Updates the specified volume under the specified parent catalog and schema.
+ * Updates the specified volume under the specified parent catalog and schema.
*
* The caller must be a metastore admin or an owner of the volume. For the latter case, the
* caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and
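
To make the managed-versus-external distinction above concrete, here is a minimal sketch of creating a managed volume; the `volumes()` accessor, the `CreateVolumeRequestContent` setters, and `VolumeType.MANAGED` are assumed from the SDK's usual generated shapes.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CreateVolumeRequestContent;
import com.databricks.sdk.service.catalog.VolumeInfo;
import com.databricks.sdk.service.catalog.VolumeType;

public class CreateVolumeExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // A managed volume is placed in the default storage location of the parent schema,
    // so no external storage location is passed here.
    VolumeInfo volume =
        w.volumes()
            .create(
                new CreateVolumeRequestContent()
                    .setCatalogName("main")
                    .setSchemaName("default")
                    .setName("raw_files")
                    .setVolumeType(VolumeType.MANAGED));

    System.out.println(volume.getFullName());
  }
}
```
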
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
index e82e88906..40f80851e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
@@ -47,10 +47,8 @@ public GetCatalogWorkspaceBindingsResponse get(String name) {
}
/**
- * Get catalog workspace bindings.
- *
- * Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of
- * the catalog.
+ * Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
+ * catalog.
*/
public GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest request) {
return impl.get(request);
@@ -62,10 +60,8 @@ public Iterable Gets workspace bindings of the securable. The caller must be a metastore admin or an owner
- * of the securable.
+ * Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of
+ * the securable.
*/
public Iterable Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner
- * of the catalog.
+ * Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of
+ * the catalog.
*/
public UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings request) {
return impl.update(request);
@@ -104,10 +98,8 @@ public UpdateWorkspaceBindingsResponse updateBindings(
}
/**
- * Update securable workspace bindings.
- *
- * Updates workspace bindings of the securable. The caller must be a metastore admin or an
- * owner of the securable.
+ * Updates workspace bindings of the securable. The caller must be a metastore admin or an owner
+ * of the securable.
*/
public UpdateWorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) {
return impl.updateBindings(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java
index 962b4ea69..00ab255f2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java
@@ -29,34 +29,26 @@
@Generated
public interface WorkspaceBindingsService {
/**
- * Get catalog workspace bindings.
- *
- * Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of
- * the catalog.
+ * Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
+ * catalog.
*/
GetCatalogWorkspaceBindingsResponse get(GetWorkspaceBindingRequest getWorkspaceBindingRequest);
/**
- * Get securable workspace bindings.
- *
- * Gets workspace bindings of the securable. The caller must be a metastore admin or an owner
- * of the securable.
+ * Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of
+ * the securable.
*/
GetWorkspaceBindingsResponse getBindings(GetBindingsRequest getBindingsRequest);
/**
- * Update catalog workspace bindings.
- *
- * Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner
- * of the catalog.
+ * Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of
+ * the catalog.
*/
UpdateCatalogWorkspaceBindingsResponse update(UpdateWorkspaceBindings updateWorkspaceBindings);
/**
- * Update securable workspace bindings.
- *
- * Updates workspace bindings of the securable. The caller must be a metastore admin or an
- * owner of the securable.
+ * Updates workspace bindings of the securable. The caller must be a metastore admin or an owner
+ * of the securable.
*/
UpdateWorkspaceBindingsResponse updateBindings(
UpdateWorkspaceBindingsParameters updateWorkspaceBindingsParameters);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
index cdb59e7a5..4cb019e0a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
@@ -33,9 +33,7 @@ public CleanRoomAsset create(String cleanRoomName, CleanRoomAsset asset) {
}
/**
- * Create an asset.
- *
- * Create a clean room asset —share an asset like a notebook or table into the clean room. For
+ * Create a clean room asset: share an asset like a notebook or table into the clean room. For
* each UC asset that is added through this method, the clean room owner must also have enough
* privilege on the asset to consume it. The privilege must be maintained indefinitely for the
* clean room to be able to access the asset. Typically, you should use a group as the clean room
@@ -53,11 +51,7 @@ public void delete(String cleanRoomName, CleanRoomAssetAssetType assetType, Stri
.setName(name));
}
- /**
- * Delete an asset.
- *
- * Delete a clean room asset - unshare/remove the asset from the clean room
- */
+ /** Delete a clean room asset - unshare/remove the asset from the clean room */
public void delete(DeleteCleanRoomAssetRequest request) {
impl.delete(request);
}
@@ -70,11 +64,7 @@ public CleanRoomAsset get(String cleanRoomName, CleanRoomAssetAssetType assetTyp
.setName(name));
}
- /**
- * Get an asset.
- *
- * Get the details of a clean room asset by its type and full name.
- */
+ /** Get the details of a clean room asset by its type and full name. */
public CleanRoomAsset get(GetCleanRoomAssetRequest request) {
return impl.get(request);
}
@@ -109,10 +99,8 @@ public CleanRoomAsset update(
}
/**
- * Update an asset.
- *
- * Update a clean room asset. For example, updating the content of a notebook; changing the
- * shared partitions of a table; etc.
+ * Update a clean room asset. For example, updating the content of a notebook; changing the shared
+ * partitions of a table; etc.
*/
public CleanRoomAsset update(UpdateCleanRoomAssetRequest request) {
return impl.update(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java
index 29921c682..b1ced1e69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java
@@ -14,9 +14,7 @@
@Generated
public interface CleanRoomAssetsService {
/**
- * Create an asset.
- *
- * Create a clean room asset —share an asset like a notebook or table into the clean room. For
+ * Create a clean room asset: share an asset like a notebook or table into the clean room. For
* each UC asset that is added through this method, the clean room owner must also have enough
* privilege on the asset to consume it. The privilege must be maintained indefinitely for the
* clean room to be able to access the asset. Typically, you should use a group as the clean room
@@ -24,28 +22,18 @@ public interface CleanRoomAssetsService {
*/
CleanRoomAsset create(CreateCleanRoomAssetRequest createCleanRoomAssetRequest);
- /**
- * Delete an asset.
- *
- * Delete a clean room asset - unshare/remove the asset from the clean room
- */
+ /** Delete a clean room asset - unshare/remove the asset from the clean room */
void delete(DeleteCleanRoomAssetRequest deleteCleanRoomAssetRequest);
- /**
- * Get an asset.
- *
- * Get the details of a clean room asset by its type and full name.
- */
+ /** Get the details of a clean room asset by its type and full name. */
CleanRoomAsset get(GetCleanRoomAssetRequest getCleanRoomAssetRequest);
/** List assets. */
ListCleanRoomAssetsResponse list(ListCleanRoomAssetsRequest listCleanRoomAssetsRequest);
/**
- * Update an asset.
- *
- * Update a clean room asset. For example, updating the content of a notebook; changing the
- * shared partitions of a table; etc.
+ * Update a clean room asset. For example, updating the content of a notebook; changing the shared
+ * partitions of a table; etc.
*/
CleanRoomAsset update(UpdateCleanRoomAssetRequest updateCleanRoomAssetRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java
index c64abea25..38e341fcf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java
@@ -28,11 +28,7 @@ public Iterable List all the historical notebook task runs in a clean room.
- */
+ /** List all the historical notebook task runs in a clean room. */
public Iterable List all the historical notebook task runs in a clean room.
- */
+ /** List all the historical notebook task runs in a clean room. */
ListCleanRoomNotebookTaskRunsResponse list(
ListCleanRoomNotebookTaskRunsRequest listCleanRoomNotebookTaskRunsRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
index be34cc9db..69469fc0c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
@@ -33,9 +33,7 @@ public CleanRoom create(CleanRoom cleanRoom) {
}
/**
- * Create a clean room.
- *
- * Create a new clean room with the specified collaborators. This method is asynchronous; the
+ * Create a new clean room with the specified collaborators. This method is asynchronous; the
* returned name field inside the clean_room field can be used to poll the clean room status,
* using the :method:cleanrooms/get method. When this method returns, the clean room will be in a
* PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean
@@ -56,11 +54,7 @@ public CreateCleanRoomOutputCatalogResponse createOutputCatalog(
.setOutputCatalog(outputCatalog));
}
- /**
- * Create an output catalog.
- *
- * Create the output catalog of the clean room.
- */
+ /** Create the output catalog of the clean room. */
public CreateCleanRoomOutputCatalogResponse createOutputCatalog(
CreateCleanRoomOutputCatalogRequest request) {
return impl.createOutputCatalog(request);
@@ -71,10 +65,8 @@ public void delete(String name) {
}
/**
- * Delete a clean room.
- *
- * Delete a clean room. After deletion, the clean room will be removed from the metastore. If
- * the other collaborators have not deleted the clean room, they will still have the clean room in
+ * Delete a clean room. After deletion, the clean room will be removed from the metastore. If the
+ * other collaborators have not deleted the clean room, they will still have the clean room in
* their metastore, but it will be in a DELETED state and no operations other than deletion can be
* performed on it.
*/
@@ -86,20 +78,14 @@ public CleanRoom get(String name) {
return get(new GetCleanRoomRequest().setName(name));
}
- /**
- * Get a clean room.
- *
- * Get the details of a clean room given its name.
- */
+ /** Get the details of a clean room given its name. */
public CleanRoom get(GetCleanRoomRequest request) {
return impl.get(request);
}
/**
- * List clean rooms.
- *
- * Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to
- * are returned.
+ * Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
+ * returned.
*/
public Iterable Update a clean room. The caller must be the owner of the clean room, have
- * **MODIFY_CLEAN_ROOM** privilege, or be metastore admin.
+ * Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
+ * privilege, or be metastore admin.
*
* When the caller is a metastore admin, only the __owner__ field can be updated.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java
index 07453308b..c3306e17d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java
@@ -15,9 +15,7 @@
@Generated
public interface CleanRoomsService {
/**
- * Create a clean room.
- *
- * Create a new clean room with the specified collaborators. This method is asynchronous; the
+ * Create a new clean room with the specified collaborators. This method is asynchronous; the
* returned name field inside the clean_room field can be used to poll the clean room status,
* using the :method:cleanrooms/get method. When this method returns, the clean room will be in a
* PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean
@@ -28,44 +26,30 @@ public interface CleanRoomsService {
*/
CleanRoom create(CreateCleanRoomRequest createCleanRoomRequest);
- /**
- * Create an output catalog.
- *
- * Create the output catalog of the clean room.
- */
+ /** Create the output catalog of the clean room. */
CreateCleanRoomOutputCatalogResponse createOutputCatalog(
CreateCleanRoomOutputCatalogRequest createCleanRoomOutputCatalogRequest);
/**
- * Delete a clean room.
- *
- * Delete a clean room. After deletion, the clean room will be removed from the metastore. If
- * the other collaborators have not deleted the clean room, they will still have the clean room in
+ * Delete a clean room. After deletion, the clean room will be removed from the metastore. If the
+ * other collaborators have not deleted the clean room, they will still have the clean room in
* their metastore, but it will be in a DELETED state and no operations other than deletion can be
* performed on it.
*/
void delete(DeleteCleanRoomRequest deleteCleanRoomRequest);
- /**
- * Get a clean room.
- *
- * Get the details of a clean room given its name.
- */
+ /** Get the details of a clean room given its name. */
CleanRoom get(GetCleanRoomRequest getCleanRoomRequest);
/**
- * List clean rooms.
- *
- * Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to
- * are returned.
+ * Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
+ * returned.
*/
ListCleanRoomsResponse list(ListCleanRoomsRequest listCleanRoomsRequest);
/**
- * Update a clean room.
- *
- * Update a clean room. The caller must be the owner of the clean room, have
- * **MODIFY_CLEAN_ROOM** privilege, or be metastore admin.
+ * Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
+ * privilege, or be metastore admin.
*
* When the caller is a metastore admin, only the __owner__ field can be updated.
*/
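
Because clean room creation is asynchronous, a caller typically creates the room and then polls :method:cleanrooms/get until it leaves the PROVISIONING state. A simplified sketch; the `cleanRooms()` accessor and the `setName`/`getName`/`getStatus` accessors on `CleanRoom` are assumptions here, and a real caller would bound the retries.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.cleanrooms.CleanRoom;

public class CreateCleanRoomExample {
  public static void main(String[] args) throws InterruptedException {
    WorkspaceClient w = new WorkspaceClient();

    // The returned clean room starts out in a PROVISIONING state.
    CleanRoom created = w.cleanRooms().create(new CleanRoom().setName("demo_clean_room"));

    // Poll until provisioning finishes (simplified: no retry bound, no error handling).
    CleanRoom current = w.cleanRooms().get(created.getName());
    while ("PROVISIONING".equals(String.valueOf(current.getStatus()))) {
      Thread.sleep(10_000L);
      current = w.cleanRooms().get(current.getName());
    }
    System.out.println(current.getName() + " is now " + current.getStatus());
  }
}
```
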
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java
index 5a36d4906..5cc4c4842 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create an asset */
@Generated
public class CreateCleanRoomAssetRequest {
/** Metadata of the clean room asset */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java
index 230ec81f3..575774a05 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create an output catalog */
@Generated
public class CreateCleanRoomOutputCatalogRequest {
/** Name of the clean room. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java
index 13930fe82..e6e00ff2f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create a clean room */
@Generated
public class CreateCleanRoomRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java
index 482c33464..15a84a03d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete an asset */
@Generated
public class DeleteCleanRoomAssetRequest {
/** The type of the asset. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java
index 7681b1fc5..f1095848e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a clean room */
@Generated
public class DeleteCleanRoomRequest {
/** Name of the clean room. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java
index a82af23df..fc7af1ca8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get an asset */
@Generated
public class GetCleanRoomAssetRequest {
/** The type of the asset. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java
index 6103c2c8f..60ebb1974 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a clean room */
@Generated
public class GetCleanRoomRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java
index 40bb4fef0..3643a1ac8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List assets */
@Generated
public class ListCleanRoomAssetsRequest {
/** Name of the clean room. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java
index 5b64fdb82..17b2a7a41 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List notebook task runs */
@Generated
public class ListCleanRoomNotebookTaskRunsRequest {
/** Name of the clean room. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java
index 45845b80a..f604b85d0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List clean rooms */
@Generated
public class ListCleanRoomsRequest {
/** Maximum number of clean rooms to return (i.e., the page length). Defaults to 100. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
index f1d57be5b..bda1f2843 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Update an asset */
@Generated
public class UpdateCleanRoomAssetRequest {
/** Metadata of the clean room asset */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
index ff9668106..17316f8c5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
@@ -191,6 +191,13 @@ public class ClusterAttributes {
@JsonProperty("policy_id")
private String policyId;
+ /**
+ * If set, the configurable throughput (in Mb/s) for the remote disk. Currently only
+ * supported for GCP HYPERDISK_BALANCED disks.
+ */
+ @JsonProperty("remote_disk_throughput")
+ private Long remoteDiskThroughput;
+
/**
* Determines the cluster's runtime engine, either standard or Photon.
*
@@ -246,6 +253,13 @@ public class ClusterAttributes {
@JsonProperty("ssh_public_keys")
private Collection Creates a new policy with prescribed settings.
- */
+ /** Creates a new policy with prescribed settings. */
public CreatePolicyResponse create(CreatePolicy request) {
return impl.create(request);
}
@@ -58,9 +54,7 @@ public void delete(String policyId) {
}
/**
- * Delete a cluster policy.
- *
- * Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be
+ * Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be
* edited.
*/
public void delete(DeletePolicy request) {
@@ -72,9 +66,7 @@ public void edit(String policyId) {
}
/**
- * Update a cluster policy.
- *
- * Update an existing policy for cluster. This operation may make some clusters governed by the
+ * Update an existing policy for cluster. This operation may make some clusters governed by the
* previous policy invalid.
*/
public void edit(EditPolicy request) {
@@ -85,11 +77,7 @@ public Policy get(String policyId) {
return get(new GetClusterPolicyRequest().setPolicyId(policyId));
}
- /**
- * Get a cluster policy.
- *
- * Get a cluster policy entity. Creation and editing is available to admins only.
- */
+ /** Get a cluster policy entity. Creation and editing is available to admins only. */
public Policy get(GetClusterPolicyRequest request) {
return impl.get(request);
}
@@ -99,11 +87,7 @@ public GetClusterPolicyPermissionLevelsResponse getPermissionLevels(String clust
new GetClusterPolicyPermissionLevelsRequest().setClusterPolicyId(clusterPolicyId));
}
- /**
- * Get cluster policy permission levels.
- *
- * Gets the permission levels that a user can have on an object.
- */
+ /** Gets the permission levels that a user can have on an object. */
public GetClusterPolicyPermissionLevelsResponse getPermissionLevels(
GetClusterPolicyPermissionLevelsRequest request) {
return impl.getPermissionLevels(request);
@@ -115,20 +99,14 @@ public ClusterPolicyPermissions getPermissions(String clusterPolicyId) {
}
/**
- * Get cluster policy permissions.
- *
- * Gets the permissions of a cluster policy. Cluster policies can inherit permissions from
- * their root object.
+ * Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their
+ * root object.
*/
public ClusterPolicyPermissions getPermissions(GetClusterPolicyPermissionsRequest request) {
return impl.getPermissions(request);
}
- /**
- * List cluster policies.
- *
- * Returns a list of policies accessible by the requesting user.
- */
+ /** Returns a list of policies accessible by the requesting user. */
public Iterable
- * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
- * direct permissions if none are specified. Objects can inherit permissions from their root
- * object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
*/
public ClusterPolicyPermissions setPermissions(ClusterPolicyPermissionsRequest request) {
return impl.setPermissions(request);
@@ -156,9 +131,7 @@ public ClusterPolicyPermissions updatePermissions(String clusterPolicyId) {
}
/**
- * Update cluster policy permissions.
- *
- * Updates the permissions on a cluster policy. Cluster policies can inherit permissions from
+ * Updates the permissions on a cluster policy. Cluster policies can inherit permissions from
* their root object.
*/
public ClusterPolicyPermissions updatePermissions(ClusterPolicyPermissionsRequest request) {
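The reflowed Javadoc above covers the whole cluster-policy surface (create, get, list, delete, plus the permission calls). A minimal usage sketch follows; it is not part of this diff, and it assumes the SDK's usual WorkspaceClient entry point with a clusterPolicies() accessor and the generated builder-style setters and getters (setName, setDefinition, getPolicyId), none of which are shown in this hunk.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.CreatePolicy;
import com.databricks.sdk.service.compute.CreatePolicyResponse;
import com.databricks.sdk.service.compute.ListClusterPoliciesRequest;
import com.databricks.sdk.service.compute.Policy;

public class ClusterPolicyExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // reads credentials from the environment

    // Creates a new policy with prescribed settings (admins only).
    CreatePolicyResponse created =
        w.clusterPolicies()
            .create(
                new CreatePolicy()
                    .setName("fixed-single-node")
                    .setDefinition("{\"num_workers\": {\"type\": \"fixed\", \"value\": 0}}"));

    // Get a cluster policy entity by its ID.
    Policy policy = w.clusterPolicies().get(created.getPolicyId());
    System.out.println("Created policy: " + policy.getName());

    // Returns a list of policies accessible by the requesting user.
    for (Policy p : w.clusterPolicies().list(new ListClusterPoliciesRequest())) {
      System.out.println(p.getPolicyId() + " " + p.getName());
    }

    // Delete the policy; clusters governed by it can still run but cannot be edited.
    w.clusterPolicies().delete(created.getPolicyId());
  }
}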
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java
index 64f2a13f3..a8ef9fa37 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java
@@ -30,74 +30,47 @@
*/
@Generated
public interface ClusterPoliciesService {
- /**
- * Create a new policy.
- *
- * Creates a new policy with prescribed settings.
- */
+ /** Creates a new policy with prescribed settings. */
CreatePolicyResponse create(CreatePolicy createPolicy);
/**
- * Delete a cluster policy.
- *
- * Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be
+ * Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be
* edited.
*/
void delete(DeletePolicy deletePolicy);
/**
- * Update a cluster policy.
- *
- * Update an existing policy for cluster. This operation may make some clusters governed by the
+ * Update an existing policy for cluster. This operation may make some clusters governed by the
* previous policy invalid.
*/
void edit(EditPolicy editPolicy);
- /**
- * Get a cluster policy.
- *
- * Get a cluster policy entity. Creation and editing is available to admins only.
- */
+ /** Get a cluster policy entity. Creation and editing is available to admins only. */
Policy get(GetClusterPolicyRequest getClusterPolicyRequest);
- /**
- * Get cluster policy permission levels.
- *
- * Gets the permission levels that a user can have on an object.
- */
+ /** Gets the permission levels that a user can have on an object. */
GetClusterPolicyPermissionLevelsResponse getPermissionLevels(
GetClusterPolicyPermissionLevelsRequest getClusterPolicyPermissionLevelsRequest);
/**
- * Get cluster policy permissions.
- *
- * Gets the permissions of a cluster policy. Cluster policies can inherit permissions from
- * their root object.
+ * Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their
+ * root object.
*/
ClusterPolicyPermissions getPermissions(
GetClusterPolicyPermissionsRequest getClusterPolicyPermissionsRequest);
- /**
- * List cluster policies.
- *
- * Returns a list of policies accessible by the requesting user.
- */
+ /** Returns a list of policies accessible by the requesting user. */
ListPoliciesResponse list(ListClusterPoliciesRequest listClusterPoliciesRequest);
/**
- * Set cluster policy permissions.
- *
- * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
- * direct permissions if none are specified. Objects can inherit permissions from their root
- * object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
*/
ClusterPolicyPermissions setPermissions(
ClusterPolicyPermissionsRequest clusterPolicyPermissionsRequest);
/**
- * Update cluster policy permissions.
- *
- * Updates the permissions on a cluster policy. Cluster policies can inherit permissions from
+ * Updates the permissions on a cluster policy. Cluster policies can inherit permissions from
* their root object.
*/
ClusterPolicyPermissions updatePermissions(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
index 08cd8a715..f93b893bc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
@@ -218,6 +218,13 @@ public class ClusterSpec {
@JsonProperty("policy_id")
private String policyId;
+ /**
+ * If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only
+ * supported for GCP HYPERDISK_BALANCED disks.
+ */
+ @JsonProperty("remote_disk_throughput")
+ private Long remoteDiskThroughput;
+
/**
* Determines the cluster's runtime engine, either standard or Photon.
*
@@ -273,6 +280,13 @@ public class ClusterSpec {
@JsonProperty("ssh_public_keys")
private Collection<String> sshPublicKeys;
- * Change the owner of the cluster. You must be an admin and the cluster must be terminated to
+ * Change the owner of the cluster. You must be an admin and the cluster must be terminated to
* perform this operation. The service principal application ID can be supplied as an argument to
* `owner_username`.
*/
@@ -162,10 +160,8 @@ public Wait
- * Creates a new Spark cluster. This method will acquire new instances from the cloud provider
- * if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the
+ * Creates a new Spark cluster. This method will acquire new instances from the cloud provider if
+ * necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the
* cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The
* cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to
* acquire some of the requested nodes, due to cloud provider limitations (account limits, spot
@@ -191,11 +187,9 @@ public Wait
- * Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously.
- * Once the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster
- * is already in a `TERMINATING` or `TERMINATED` state, nothing will happen.
+ * Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once
+ * the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is
+ * already in a `TERMINATING` or `TERMINATED` state, nothing will happen.
*/
public Wait
- * Updates the configuration of a cluster to match the provided attributes and size. A cluster
- * can be updated if it is in a `RUNNING` or `TERMINATED` state.
+ * Updates the configuration of a cluster to match the provided attributes and size. A cluster can
+ * be updated if it is in a `RUNNING` or `TERMINATED` state.
*
* If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new
* attributes can take effect.
@@ -234,10 +226,8 @@ public Iterable
- * Retrieves a list of events about the activity of a cluster. This API is paginated. If there
- * are more events to read, the response includes all the parameters necessary to request the next
+ * Retrieves a list of events about the activity of a cluster. This API is paginated. If there are
+ * more events to read, the response includes all the parameters necessary to request the next
* page of events.
*/
public Iterable
- * Retrieves the information for a cluster given its identifier. Clusters can be described
- * while they are running, or up to 60 days after they are terminated.
+ * Retrieves the information for a cluster given its identifier. Clusters can be described while
+ * they are running, or up to 60 days after they are terminated.
*/
public ClusterDetails get(GetClusterRequest request) {
return impl.get(request);
@@ -263,11 +251,7 @@ public GetClusterPermissionLevelsResponse getPermissionLevels(String clusterId)
return getPermissionLevels(new GetClusterPermissionLevelsRequest().setClusterId(clusterId));
}
- /**
- * Get cluster permission levels.
- *
- * Gets the permission levels that a user can have on an object.
- */
+ /** Gets the permission levels that a user can have on an object. */
public GetClusterPermissionLevelsResponse getPermissionLevels(
GetClusterPermissionLevelsRequest request) {
return impl.getPermissionLevels(request);
@@ -277,20 +261,14 @@ public ClusterPermissions getPermissions(String clusterId) {
return getPermissions(new GetClusterPermissionsRequest().setClusterId(clusterId));
}
- /**
- * Get cluster permissions.
- *
- * Gets the permissions of a cluster. Clusters can inherit permissions from their root object.
- */
+ /** Gets the permissions of a cluster. Clusters can inherit permissions from their root object. */
public ClusterPermissions getPermissions(GetClusterPermissionsRequest request) {
return impl.getPermissions(request);
}
/**
- * List clusters.
- *
- * Return information about all pinned and active clusters, and all clusters terminated within
- * the last 30 days. Clusters terminated prior to this period are not included.
+ * Return information about all pinned and active clusters, and all clusters terminated within the
+ * last 30 days. Clusters terminated prior to this period are not included.
*/
public Iterable
- * Returns a list of supported Spark node types. These node types can be used to launch a
- * cluster.
+ * Returns a list of supported Spark node types. These node types can be used to launch a cluster.
*/
public ListNodeTypesResponse listNodeTypes() {
return impl.listNodeTypes();
}
/**
- * List availability zones.
- *
- * Returns a list of availability zones where clusters can be created in (For example,
+ * Returns a list of availability zones where clusters can be created in (For example,
* us-west-2a). These zones can be used to launch a cluster.
*/
public ListAvailableZonesResponse listZones() {
@@ -331,9 +304,7 @@ public void permanentDelete(String clusterId) {
}
/**
- * Permanently delete cluster.
- *
- * Permanently deletes a Spark cluster. This cluster is terminated and resources are
+ * Permanently deletes a Spark cluster. This cluster is terminated and resources are
* asynchronously removed.
*
* In addition, users will no longer see permanently deleted clusters in the cluster list, and
@@ -348,9 +319,7 @@ public void pin(String clusterId) {
}
/**
- * Pin cluster.
- *
- * Pinning a cluster ensures that the cluster will always be returned by the ListClusters API.
+ * Pinning a cluster ensures that the cluster will always be returned by the ListClusters API.
* Pinning a cluster that is already pinned will have no effect. This API can only be called by
* workspace admins.
*/
@@ -363,10 +332,8 @@ public Wait
- * Resizes a cluster to have a desired number of workers. This will fail unless the cluster is
- * in a `RUNNING` state.
+ * Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in
+ * a `RUNNING` state.
*/
public Wait
- * Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a
- * `RUNNING` state, nothing will happen.
+ * Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING`
+ * state, nothing will happen.
*/
public Wait
- * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
- * direct permissions if none are specified. Objects can inherit permissions from their root
- * object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
*/
public ClusterPermissions setPermissions(ClusterPermissionsRequest request) {
return impl.setPermissions(request);
}
/**
- * List available Spark versions.
- *
- * Returns the list of available Spark versions. These versions can be used to launch a
- * cluster.
+ * Returns the list of available Spark versions. These versions can be used to launch a cluster.
*/
public GetSparkVersionsResponse sparkVersions() {
return impl.sparkVersions();
@@ -420,14 +379,11 @@ public Wait
- * Starts a terminated Spark cluster with the supplied ID. This works similar to
- * `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster
- * starts with the last specified cluster size. - If the previous cluster was an autoscaling
- * cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not
- * currently in a ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job
- * cannot be started.
+ * Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster`
+ * except: - The previous cluster id and attributes are preserved. - The cluster starts with the
+ * last specified cluster size. - If the previous cluster was an autoscaling cluster, the current
+ * cluster starts with the minimum number of nodes. - If the cluster is not currently in a
+ * ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job cannot be started.
*/
public Wait
- * Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters
- * API. Unpinning a cluster that is not pinned will have no effect. This API can only be called by
+ * Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API.
+ * Unpinning a cluster that is not pinned will have no effect. This API can only be called by
* workspace admins.
*/
public void unpin(UnpinCluster request) {
@@ -455,11 +409,9 @@ public Wait
- * Updates the configuration of a cluster to match the partial set of attributes and size.
- * Denote which fields to update using the `update_mask` field in the request body. A cluster can
- * be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a
+ * Updates the configuration of a cluster to match the partial set of attributes and size. Denote
+ * which fields to update using the `update_mask` field in the request body. A cluster can be
+ * updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a
* `RUNNING` state, it will be restarted so that the new attributes can take effect. If a cluster
* is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The updated attributes
* will take effect the next time the cluster is started using the `clusters/start` API. Attempts
@@ -477,10 +429,7 @@ public ClusterPermissions updatePermissions(String clusterId) {
}
/**
- * Update cluster permissions.
- *
- * Updates the permissions on a cluster. Clusters can inherit permissions from their root
- * object.
+ * Updates the permissions on a cluster. Clusters can inherit permissions from their root object.
*/
public ClusterPermissions updatePermissions(ClusterPermissionsRequest request) {
return impl.updatePermissions(request);
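Read together, the Javadoc above describes an asynchronous lifecycle: create() acquires instances and returns immediately with a cluster_id, the cluster moves from PENDING to RUNNING, and delete() terminates it asynchronously. A sketch follows; it is not part of this diff, and the clusters() accessor, the blocking get() on the returned waiter, the builder-style setters (including setRemoteDiskThroughput for the remote_disk_throughput field introduced in this change), and the placeholder Spark version and node type are all assumptions rather than anything this hunk shows.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.ClusterDetails;
import com.databricks.sdk.service.compute.CreateCluster;
import com.databricks.sdk.service.compute.DeleteCluster;

public class ClusterLifecycleExample {
  public static void main(String[] args) throws Exception {
    WorkspaceClient w = new WorkspaceClient();

    // create() is asynchronous; the waiter polls until the cluster reaches RUNNING.
    ClusterDetails cluster =
        w.clusters()
            .create(
                new CreateCluster()
                    .setClusterName("sdk-example")
                    .setSparkVersion("15.4.x-scala2.12") // placeholder runtime version
                    .setNodeTypeId("i3.xlarge") // placeholder node type
                    .setNumWorkers(1L)
                    .setRemoteDiskThroughput(250L)) // Mb/s; GCP HYPERDISK_BALANCED only
            .get();

    System.out.println("Cluster is RUNNING: " + cluster.getClusterId());

    // delete() removes the cluster asynchronously; it ends up in a TERMINATED state.
    w.clusters().delete(new DeleteCluster().setClusterId(cluster.getClusterId()));
  }
}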
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java
index 48bdd74a0..f0fa9c0a6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java
@@ -33,19 +33,15 @@
@Generated
public interface ClustersService {
/**
- * Change cluster owner.
- *
- * Change the owner of the cluster. You must be an admin and the cluster must be terminated to
+ * Change the owner of the cluster. You must be an admin and the cluster must be terminated to
* perform this operation. The service principal application ID can be supplied as an argument to
* `owner_username`.
*/
void changeOwner(ChangeClusterOwner changeClusterOwner);
/**
- * Create new cluster.
- *
- * Creates a new Spark cluster. This method will acquire new instances from the cloud provider
- * if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the
+ * Creates a new Spark cluster. This method will acquire new instances from the cloud provider if
+ * necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the
* cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The
* cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to
* acquire some of the requested nodes, due to cloud provider limitations (account limits, spot
@@ -62,19 +58,15 @@ public interface ClustersService {
CreateClusterResponse create(CreateCluster createCluster);
/**
- * Terminate cluster.
- *
- * Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously.
- * Once the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster
- * is already in a `TERMINATING` or `TERMINATED` state, nothing will happen.
+ * Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once
+ * the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is
+ * already in a `TERMINATING` or `TERMINATED` state, nothing will happen.
*/
void delete(DeleteCluster deleteCluster);
/**
- * Update cluster configuration.
- *
- * Updates the configuration of a cluster to match the provided attributes and size. A cluster
- * can be updated if it is in a `RUNNING` or `TERMINATED` state.
+ * Updates the configuration of a cluster to match the provided attributes and size. A cluster can
+ * be updated if it is in a `RUNNING` or `TERMINATED` state.
*
* If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new
* attributes can take effect.
@@ -89,65 +81,44 @@ public interface ClustersService {
void edit(EditCluster editCluster);
/**
- * List cluster activity events.
- *
- * Retrieves a list of events about the activity of a cluster. This API is paginated. If there
- * are more events to read, the response includes all the parameters necessary to request the next
+ * Retrieves a list of events about the activity of a cluster. This API is paginated. If there are
+ * more events to read, the response includes all the parameters necessary to request the next
* page of events.
*/
GetEventsResponse events(GetEvents getEvents);
/**
- * Get cluster info.
- *
- * Retrieves the information for a cluster given its identifier. Clusters can be described
- * while they are running, or up to 60 days after they are terminated.
+ * Retrieves the information for a cluster given its identifier. Clusters can be described while
+ * they are running, or up to 60 days after they are terminated.
*/
ClusterDetails get(GetClusterRequest getClusterRequest);
- /**
- * Get cluster permission levels.
- *
- * Gets the permission levels that a user can have on an object.
- */
+ /** Gets the permission levels that a user can have on an object. */
GetClusterPermissionLevelsResponse getPermissionLevels(
GetClusterPermissionLevelsRequest getClusterPermissionLevelsRequest);
- /**
- * Get cluster permissions.
- *
- * Gets the permissions of a cluster. Clusters can inherit permissions from their root object.
- */
+ /** Gets the permissions of a cluster. Clusters can inherit permissions from their root object. */
ClusterPermissions getPermissions(GetClusterPermissionsRequest getClusterPermissionsRequest);
/**
- * List clusters.
- *
- * Return information about all pinned and active clusters, and all clusters terminated within
- * the last 30 days. Clusters terminated prior to this period are not included.
+ * Return information about all pinned and active clusters, and all clusters terminated within the
+ * last 30 days. Clusters terminated prior to this period are not included.
*/
ListClustersResponse list(ListClustersRequest listClustersRequest);
/**
- * List node types.
- *
- * Returns a list of supported Spark node types. These node types can be used to launch a
- * cluster.
+ * Returns a list of supported Spark node types. These node types can be used to launch a cluster.
*/
ListNodeTypesResponse listNodeTypes();
/**
- * List availability zones.
- *
- * Returns a list of availability zones where clusters can be created in (For example,
+ * Returns a list of availability zones where clusters can be created in (For example,
* us-west-2a). These zones can be used to launch a cluster.
*/
ListAvailableZonesResponse listZones();
/**
- * Permanently delete cluster.
- *
- * Permanently deletes a Spark cluster. This cluster is terminated and resources are
+ * Permanently deletes a Spark cluster. This cluster is terminated and resources are
* asynchronously removed.
*
* In addition, users will no longer see permanently deleted clusters in the cluster list, and
@@ -156,74 +127,55 @@ GetClusterPermissionLevelsResponse getPermissionLevels(
void permanentDelete(PermanentDeleteCluster permanentDeleteCluster);
/**
- * Pin cluster.
- *
- * Pinning a cluster ensures that the cluster will always be returned by the ListClusters API.
+ * Pinning a cluster ensures that the cluster will always be returned by the ListClusters API.
* Pinning a cluster that is already pinned will have no effect. This API can only be called by
* workspace admins.
*/
void pin(PinCluster pinCluster);
/**
- * Resize cluster.
- *
- * Resizes a cluster to have a desired number of workers. This will fail unless the cluster is
- * in a `RUNNING` state.
+ * Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in
+ * a `RUNNING` state.
*/
void resize(ResizeCluster resizeCluster);
/**
- * Restart cluster.
- *
- * Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a
- * `RUNNING` state, nothing will happen.
+ * Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING`
+ * state, nothing will happen.
*/
void restart(RestartCluster restartCluster);
/**
- * Set cluster permissions.
- *
- * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
- * direct permissions if none are specified. Objects can inherit permissions from their root
- * object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
*/
ClusterPermissions setPermissions(ClusterPermissionsRequest clusterPermissionsRequest);
/**
- * List available Spark versions.
- *
- * Returns the list of available Spark versions. These versions can be used to launch a
- * cluster.
+ * Returns the list of available Spark versions. These versions can be used to launch a cluster.
*/
GetSparkVersionsResponse sparkVersions();
/**
- * Start terminated cluster.
- *
- * Starts a terminated Spark cluster with the supplied ID. This works similar to
- * `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster
- * starts with the last specified cluster size. - If the previous cluster was an autoscaling
- * cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not
- * currently in a ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job
- * cannot be started.
+ * Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster`
+ * except: - The previous cluster id and attributes are preserved. - The cluster starts with the
+ * last specified cluster size. - If the previous cluster was an autoscaling cluster, the current
+ * cluster starts with the minimum number of nodes. - If the cluster is not currently in a
+ * ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job cannot be started.
*/
void start(StartCluster startCluster);
/**
- * Unpin cluster.
- *
- * Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters
- * API. Unpinning a cluster that is not pinned will have no effect. This API can only be called by
+ * Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API.
+ * Unpinning a cluster that is not pinned will have no effect. This API can only be called by
* workspace admins.
*/
void unpin(UnpinCluster unpinCluster);
/**
- * Update cluster configuration (partial).
- *
- * Updates the configuration of a cluster to match the partial set of attributes and size.
- * Denote which fields to update using the `update_mask` field in the request body. A cluster can
- * be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a
+ * Updates the configuration of a cluster to match the partial set of attributes and size. Denote
+ * which fields to update using the `update_mask` field in the request body. A cluster can be
+ * updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a
* `RUNNING` state, it will be restarted so that the new attributes can take effect. If a cluster
* is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The updated attributes
* will take effect the next time the cluster is started using the `clusters/start` API. Attempts
@@ -233,10 +185,7 @@ GetClusterPermissionLevelsResponse getPermissionLevels(
void update(UpdateCluster updateCluster);
/**
- * Update cluster permissions.
- *
- * Updates the permissions on a cluster. Clusters can inherit permissions from their root
- * object.
+ * Updates the permissions on a cluster. Clusters can inherit permissions from their root object.
*/
ClusterPermissions updatePermissions(ClusterPermissionsRequest clusterPermissionsRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
index 886970e07..e4bf62f0f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
@@ -206,9 +206,7 @@ public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError(
}
/**
- * Cancel a command.
- *
- * Cancels a currently running command within an execution context.
+ * Cancels a currently running command within an execution context.
*
* The command ID is obtained from a prior successful call to __execute__.
*/
@@ -233,9 +231,7 @@ public CommandStatusResponse commandStatus(String clusterId, String contextId, S
}
/**
- * Get command info.
- *
- * Gets the status of and, if available, the results from a currently executing command.
+ * Gets the status of and, if available, the results from a currently executing command.
*
* The command ID is obtained from a prior successful call to __execute__.
*/
@@ -248,19 +244,13 @@ public ContextStatusResponse contextStatus(String clusterId, String contextId) {
new ContextStatusRequest().setClusterId(clusterId).setContextId(contextId));
}
- /**
- * Get status.
- *
- * Gets the status for an execution context.
- */
+ /** Gets the status for an execution context. */
public ContextStatusResponse contextStatus(ContextStatusRequest request) {
return impl.contextStatus(request);
}
/**
- * Create an execution context.
- *
- * Creates an execution context for running cluster commands.
+ * Creates an execution context for running cluster commands.
*
* If successful, this method returns the ID of the new execution context.
*/
@@ -277,19 +267,13 @@ public void destroy(String clusterId, String contextId) {
destroy(new DestroyContext().setClusterId(clusterId).setContextId(contextId));
}
- /**
- * Delete an execution context.
- *
- * Deletes an execution context.
- */
+ /** Deletes an execution context. */
public void destroy(DestroyContext request) {
impl.destroy(request);
}
/**
- * Run a command.
- *
- * Runs a cluster command in the given execution context, using the provided language.
+ * Runs a cluster command in the given execution context, using the provided language.
*
* If successful, it returns an ID for tracking the status of the command's execution.
*/
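The Javadoc above outlines the execution-context lifecycle: create a context on a running cluster, run commands in it (the command ID from __execute__ feeds commandStatus and cancel), and destroy it when finished. A minimal sketch of the context handling follows; it is not part of this diff, and the commandExecution() accessor, the CreateContext setters, and a waiter whose get() blocks until the context is usable are assumed from the SDK's generated conventions.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.ContextStatusResponse;
import com.databricks.sdk.service.compute.CreateContext;
import com.databricks.sdk.service.compute.Language;

public class ExecutionContextExample {
  public static void main(String[] args) throws Exception {
    WorkspaceClient w = new WorkspaceClient();
    String clusterId = "0123-456789-abcdefgh"; // placeholder: an existing running cluster

    // Creates an execution context for running cluster commands.
    ContextStatusResponse ctx =
        w.commandExecution()
            .create(new CreateContext().setClusterId(clusterId).setLanguage(Language.PYTHON))
            .get(); // assumed: blocks until the context is ready

    // Gets the status for an execution context.
    ContextStatusResponse status = w.commandExecution().contextStatus(clusterId, ctx.getId());
    System.out.println("Context " + ctx.getId() + " is " + status.getStatus());

    // Deletes the execution context.
    w.commandExecution().destroy(clusterId, ctx.getId());
  }
}

Note that this change also adds an R constant to the Language enum (see the Language.java hunk below), so setLanguage(Language.R) becomes available for R commands.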
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionService.java
index b06c8a2a5..a28c23aaf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionService.java
@@ -14,50 +14,34 @@
@Generated
public interface CommandExecutionService {
/**
- * Cancel a command.
- *
- * Cancels a currently running command within an execution context.
+ * Cancels a currently running command within an execution context.
*
* The command ID is obtained from a prior successful call to __execute__.
*/
void cancel(CancelCommand cancelCommand);
/**
- * Get command info.
- *
- * Gets the status of and, if available, the results from a currently executing command.
+ * Gets the status of and, if available, the results from a currently executing command.
*
* The command ID is obtained from a prior successful call to __execute__.
*/
CommandStatusResponse commandStatus(CommandStatusRequest commandStatusRequest);
- /**
- * Get status.
- *
- * Gets the status for an execution context.
- */
+ /** Gets the status for an execution context. */
ContextStatusResponse contextStatus(ContextStatusRequest contextStatusRequest);
/**
- * Create an execution context.
- *
- * Creates an execution context for running cluster commands.
+ * Creates an execution context for running cluster commands.
*
* If successful, this method returns the ID of the new execution context.
*/
Created create(CreateContext createContext);
- /**
- * Delete an execution context.
- *
- * Deletes an execution context.
- */
+ /** Deletes an execution context. */
void destroy(DestroyContext destroyContext);
/**
- * Run a command.
- *
- * Runs a cluster command in the given execution context, using the provided language.
+ * Runs a cluster command in the given execution context, using the provided language.
*
* If successful, it returns an ID for tracking the status of the command's execution.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java
index effb7c1a1..24fa19723 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get command info */
@Generated
public class CommandStatusRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java
index 1da091984..f3219d506 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get status */
@Generated
public class ContextStatusRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
index 79853eda0..b860190d0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
@@ -221,6 +221,13 @@ public class CreateCluster {
@JsonProperty("policy_id")
private String policyId;
+ /**
+ * If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only
+ * supported for GCP HYPERDISK_BALANCED disks.
+ */
+ @JsonProperty("remote_disk_throughput")
+ private Long remoteDiskThroughput;
+
/**
* Determines the cluster's runtime engine, either standard or Photon.
*
@@ -276,6 +283,13 @@ public class CreateCluster {
@JsonProperty("ssh_public_keys")
private Collection<String> sshPublicKeys;
- * Creates a new global init script in this workspace.
- */
+ /** Creates a new global init script in this workspace. */
public CreateResponse create(GlobalInitScriptCreateRequest request) {
return impl.create(request);
}
@@ -49,11 +45,7 @@ public void delete(String scriptId) {
delete(new DeleteGlobalInitScriptRequest().setScriptId(scriptId));
}
- /**
- * Delete init script.
- *
- * Deletes a global init script.
- */
+ /** Deletes a global init script. */
public void delete(DeleteGlobalInitScriptRequest request) {
impl.delete(request);
}
@@ -62,21 +54,15 @@ public GlobalInitScriptDetailsWithContent get(String scriptId) {
return get(new GetGlobalInitScriptRequest().setScriptId(scriptId));
}
- /**
- * Get an init script.
- *
- * Gets all the details of a script, including its Base64-encoded contents.
- */
+ /** Gets all the details of a script, including its Base64-encoded contents. */
public GlobalInitScriptDetailsWithContent get(GetGlobalInitScriptRequest request) {
return impl.get(request);
}
/**
- * Get init scripts.
- *
- * Get a list of all global init scripts for this workspace. This returns all properties for
- * each script but **not** the script contents. To retrieve the contents of a script, use the [get
- * a global init script](:method:globalinitscripts/get) operation.
+ * Get a list of all global init scripts for this workspace. This returns all properties for each
+ * script but **not** the script contents. To retrieve the contents of a script, use the [get a
+ * global init script](:method:globalinitscripts/get) operation.
*/
public Iterable
- * Updates a global init script, specifying only the fields to change. All fields are optional.
+ * Updates a global init script, specifying only the fields to change. All fields are optional.
* Unspecified fields retain their current value.
*/
public void update(GlobalInitScriptUpdateRequest request) {
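A short sketch of the init-script flow described above follows; it is not part of this diff. The globalInitScripts() accessor and the request setters and response getters are assumed generated names; the script body is Base64-encoded as the create/get Javadoc requires.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.CreateResponse;
import com.databricks.sdk.service.compute.GlobalInitScriptCreateRequest;
import com.databricks.sdk.service.compute.GlobalInitScriptDetails;
import com.databricks.sdk.service.compute.GlobalInitScriptDetailsWithContent;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class GlobalInitScriptExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    String script = "#!/bin/bash\necho 'hello from an init script'\n";

    // Creates a new global init script in this workspace (contents are Base64-encoded).
    CreateResponse created =
        w.globalInitScripts()
            .create(
                new GlobalInitScriptCreateRequest()
                    .setName("sdk-example-init")
                    .setEnabled(true)
                    .setScript(
                        Base64.getEncoder()
                            .encodeToString(script.getBytes(StandardCharsets.UTF_8))));

    // Gets all the details of a script, including its Base64-encoded contents.
    GlobalInitScriptDetailsWithContent details = w.globalInitScripts().get(created.getScriptId());
    System.out.println(details.getName());

    // list() returns each script's properties but not its contents.
    for (GlobalInitScriptDetails s : w.globalInitScripts().list()) {
      System.out.println(s.getScriptId() + " enabled=" + s.getEnabled());
    }

    w.globalInitScripts().delete(created.getScriptId());
  }
}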
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java
index 2a8b7f429..b031af2ef 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java
@@ -18,40 +18,24 @@
*/
@Generated
public interface GlobalInitScriptsService {
- /**
- * Create init script.
- *
- * Creates a new global init script in this workspace.
- */
+ /** Creates a new global init script in this workspace. */
CreateResponse create(GlobalInitScriptCreateRequest globalInitScriptCreateRequest);
- /**
- * Delete init script.
- *
- * Deletes a global init script.
- */
+ /** Deletes a global init script. */
void delete(DeleteGlobalInitScriptRequest deleteGlobalInitScriptRequest);
- /**
- * Get an init script.
- *
- * Gets all the details of a script, including its Base64-encoded contents.
- */
+ /** Gets all the details of a script, including its Base64-encoded contents. */
GlobalInitScriptDetailsWithContent get(GetGlobalInitScriptRequest getGlobalInitScriptRequest);
/**
- * Get init scripts.
- *
- * Get a list of all global init scripts for this workspace. This returns all properties for
- * each script but **not** the script contents. To retrieve the contents of a script, use the [get
- * a global init script](:method:globalinitscripts/get) operation.
+ * Get a list of all global init scripts for this workspace. This returns all properties for each
+ * script but **not** the script contents. To retrieve the contents of a script, use the [get a
+ * global init script](:method:globalinitscripts/get) operation.
*/
ListGlobalInitScriptsResponse list();
/**
- * Update init script.
- *
- * Updates a global init script, specifying only the fields to change. All fields are optional.
+ * Updates a global init script, specifying only the fields to change. All fields are optional.
* Unspecified fields retain their current value.
*/
void update(GlobalInitScriptUpdateRequest globalInitScriptUpdateRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
index f2fd58676..485798092 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
@@ -122,6 +122,13 @@ public class InstancePoolAndStats {
@JsonProperty("preloaded_spark_versions")
private Collection<String> preloadedSparkVersions;
- * Creates a new instance pool using idle and ready-to-use cloud instances.
- */
+ /** Creates a new instance pool using idle and ready-to-use cloud instances. */
public CreateInstancePoolResponse create(CreateInstancePool request) {
return impl.create(request);
}
@@ -59,9 +55,7 @@ public void delete(String instancePoolId) {
}
/**
- * Delete an instance pool.
- *
- * Deletes the instance pool permanently. The idle instances in the pool are terminated
+ * Deletes the instance pool permanently. The idle instances in the pool are terminated
* asynchronously.
*/
public void delete(DeleteInstancePool request) {
@@ -76,11 +70,7 @@ public void edit(String instancePoolId, String instancePoolName, String nodeType
.setNodeTypeId(nodeTypeId));
}
- /**
- * Edit an existing instance pool.
- *
- * Modifies the configuration of an existing instance pool.
- */
+ /** Modifies the configuration of an existing instance pool. */
public void edit(EditInstancePool request) {
impl.edit(request);
}
@@ -89,11 +79,7 @@ public GetInstancePool get(String instancePoolId) {
return get(new GetInstancePoolRequest().setInstancePoolId(instancePoolId));
}
- /**
- * Get instance pool information.
- *
- * Retrieve the information for an instance pool based on its identifier.
- */
+ /** Retrieve the information for an instance pool based on its identifier. */
public GetInstancePool get(GetInstancePoolRequest request) {
return impl.get(request);
}
@@ -103,11 +89,7 @@ public GetInstancePoolPermissionLevelsResponse getPermissionLevels(String instan
new GetInstancePoolPermissionLevelsRequest().setInstancePoolId(instancePoolId));
}
- /**
- * Get instance pool permission levels.
- *
- * Gets the permission levels that a user can have on an object.
- */
+ /** Gets the permission levels that a user can have on an object. */
public GetInstancePoolPermissionLevelsResponse getPermissionLevels(
GetInstancePoolPermissionLevelsRequest request) {
return impl.getPermissionLevels(request);
@@ -119,20 +101,14 @@ public InstancePoolPermissions getPermissions(String instancePoolId) {
}
/**
- * Get instance pool permissions.
- *
- * Gets the permissions of an instance pool. Instance pools can inherit permissions from their
+ * Gets the permissions of an instance pool. Instance pools can inherit permissions from their
* root object.
*/
public InstancePoolPermissions getPermissions(GetInstancePoolPermissionsRequest request) {
return impl.getPermissions(request);
}
- /**
- * List instance pool info.
- *
- * Gets a list of instance pools with their statistics.
- */
+ /** Gets a list of instance pools with their statistics. */
public Iterable
- * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
- * direct permissions if none are specified. Objects can inherit permissions from their root
- * object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
*/
public InstancePoolPermissions setPermissions(InstancePoolPermissionsRequest request) {
return impl.setPermissions(request);
@@ -159,10 +132,8 @@ public InstancePoolPermissions updatePermissions(String instancePoolId) {
}
/**
- * Update instance pool permissions.
- *
- * Updates the permissions on an instance pool. Instance pools can inherit permissions from
- * their root object.
+ * Updates the permissions on an instance pool. Instance pools can inherit permissions from their
+ * root object.
*/
public InstancePoolPermissions updatePermissions(InstancePoolPermissionsRequest request) {
return impl.updatePermissions(request);
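A sketch of the pool lifecycle described above follows; it is not part of this diff. The instancePools() accessor and the setters beyond setNodeTypeId (which does appear in the edit() convenience overload above) are assumed generated names, and list() is assumed to be exposed as an Iterable on the API class.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.CreateInstancePool;
import com.databricks.sdk.service.compute.CreateInstancePoolResponse;
import com.databricks.sdk.service.compute.GetInstancePool;
import com.databricks.sdk.service.compute.InstancePoolAndStats;

public class InstancePoolExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Creates a new instance pool using idle and ready-to-use cloud instances.
    CreateInstancePoolResponse created =
        w.instancePools()
            .create(
                new CreateInstancePool()
                    .setInstancePoolName("sdk-example-pool")
                    .setNodeTypeId("i3.xlarge") // placeholder node type
                    .setMinIdleInstances(0L));

    // Retrieve the information for an instance pool based on its identifier.
    GetInstancePool pool = w.instancePools().get(created.getInstancePoolId());
    System.out.println(pool.getInstancePoolName());

    // Gets a list of instance pools with their statistics.
    for (InstancePoolAndStats p : w.instancePools().list()) {
      System.out.println(p.getInstancePoolId());
    }

    // Deletes the pool permanently; its idle instances are terminated asynchronously.
    w.instancePools().delete(created.getInstancePoolId());
  }
}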
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java
index 0a7d03ead..4b41a1123 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java
@@ -26,74 +26,45 @@
*/
@Generated
public interface InstancePoolsService {
- /**
- * Create a new instance pool.
- *
- * Creates a new instance pool using idle and ready-to-use cloud instances.
- */
+ /** Creates a new instance pool using idle and ready-to-use cloud instances. */
CreateInstancePoolResponse create(CreateInstancePool createInstancePool);
/**
- * Delete an instance pool.
- *
- * Deletes the instance pool permanently. The idle instances in the pool are terminated
+ * Deletes the instance pool permanently. The idle instances in the pool are terminated
* asynchronously.
*/
void delete(DeleteInstancePool deleteInstancePool);
- /**
- * Edit an existing instance pool.
- *
- * Modifies the configuration of an existing instance pool.
- */
+ /** Modifies the configuration of an existing instance pool. */
void edit(EditInstancePool editInstancePool);
- /**
- * Get instance pool information.
- *
- * Retrieve the information for an instance pool based on its identifier.
- */
+ /** Retrieve the information for an instance pool based on its identifier. */
GetInstancePool get(GetInstancePoolRequest getInstancePoolRequest);
- /**
- * Get instance pool permission levels.
- *
- * Gets the permission levels that a user can have on an object.
- */
+ /** Gets the permission levels that a user can have on an object. */
GetInstancePoolPermissionLevelsResponse getPermissionLevels(
GetInstancePoolPermissionLevelsRequest getInstancePoolPermissionLevelsRequest);
/**
- * Get instance pool permissions.
- *
- * Gets the permissions of an instance pool. Instance pools can inherit permissions from their
+ * Gets the permissions of an instance pool. Instance pools can inherit permissions from their
* root object.
*/
InstancePoolPermissions getPermissions(
GetInstancePoolPermissionsRequest getInstancePoolPermissionsRequest);
- /**
- * List instance pool info.
- *
- * Gets a list of instance pools with their statistics.
- */
+ /** Gets a list of instance pools with their statistics. */
ListInstancePools list();
/**
- * Set instance pool permissions.
- *
- * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
- * direct permissions if none are specified. Objects can inherit permissions from their root
- * object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+ * permissions if none are specified. Objects can inherit permissions from their root object.
*/
InstancePoolPermissions setPermissions(
InstancePoolPermissionsRequest instancePoolPermissionsRequest);
/**
- * Update instance pool permissions.
- *
- * Updates the permissions on an instance pool. Instance pools can inherit permissions from
- * their root object.
+ * Updates the permissions on an instance pool. Instance pools can inherit permissions from their
+ * root object.
*/
InstancePoolPermissions updatePermissions(
InstancePoolPermissionsRequest instancePoolPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
index 89729b951..d5ca075b7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
@@ -36,10 +36,8 @@ public void add(String instanceProfileArn) {
}
/**
- * Register an instance profile.
- *
- * Registers an instance profile in Databricks. In the UI, you can then give users the
- * permission to use this instance profile when launching clusters.
+ * Registers an instance profile in Databricks. In the UI, you can then give users the permission
+ * to use this instance profile when launching clusters.
*
* This API is only available to admin users.
*/
@@ -52,9 +50,7 @@ public void edit(String instanceProfileArn) {
}
/**
- * Edit an instance profile.
- *
- * The only supported field to change is the optional IAM role ARN associated with the instance
+ * The only supported field to change is the optional IAM role ARN associated with the instance
* profile. It is required to specify the IAM role ARN if both of the following are true:
*
* * Your role name and instance profile name do not match. The name is the part after the last
@@ -73,9 +69,7 @@ public void edit(InstanceProfile request) {
}
/**
- * List available instance profiles.
- *
- * List the instance profiles that the calling user can use to launch a cluster.
+ * List the instance profiles that the calling user can use to launch a cluster.
*
* This API is available to all users.
*/
@@ -92,10 +86,8 @@ public void remove(String instanceProfileArn) {
}
/**
- * Remove the instance profile.
- *
- * Remove the instance profile with the provided ARN. Existing clusters with this instance
- * profile will continue to function.
+ * Remove the instance profile with the provided ARN. Existing clusters with this instance profile
+ * will continue to function.
*
* This API is only accessible to admin users.
*/
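The instance-profile surface above is small and admin-oriented: register an ARN, optionally fix up its IAM role ARN, list what the caller may use, and remove it again. A sketch follows; it is not part of this diff, and the instanceProfiles() accessor plus an Iterable-returning list() are assumed from the SDK's generated conventions.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.InstanceProfile;

public class InstanceProfileExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String arn = "arn:aws:iam::123456789012:instance-profile/example"; // placeholder ARN

    // Registers an instance profile in Databricks (admin users only).
    w.instanceProfiles().add(arn);

    // List the instance profiles that the calling user can use to launch a cluster.
    for (InstanceProfile p : w.instanceProfiles().list()) {
      System.out.println(p.getInstanceProfileArn());
    }

    // Remove the instance profile; existing clusters that use it keep functioning.
    w.instanceProfiles().remove(arn);
  }
}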
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesService.java
index eb7e78c1d..e5b4d304f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesService.java
@@ -18,19 +18,15 @@
@Generated
public interface InstanceProfilesService {
/**
- * Register an instance profile.
- *
- * Registers an instance profile in Databricks. In the UI, you can then give users the
- * permission to use this instance profile when launching clusters.
+ * Registers an instance profile in Databricks. In the UI, you can then give users the permission
+ * to use this instance profile when launching clusters.
*
* This API is only available to admin users.
*/
void add(AddInstanceProfile addInstanceProfile);
/**
- * Edit an instance profile.
- *
- * The only supported field to change is the optional IAM role ARN associated with the instance
+ * The only supported field to change is the optional IAM role ARN associated with the instance
* profile. It is required to specify the IAM role ARN if both of the following are true:
*
* * Your role name and instance profile name do not match. The name is the part after the last
@@ -47,19 +43,15 @@ public interface InstanceProfilesService {
void edit(InstanceProfile instanceProfile);
/**
- * List available instance profiles.
- *
- * List the instance profiles that the calling user can use to launch a cluster.
+ * List the instance profiles that the calling user can use to launch a cluster.
*
* This API is available to all users.
*/
ListInstanceProfilesResponse list();
/**
- * Remove the instance profile.
- *
- * Remove the instance profile with the provided ARN. Existing clusters with this instance
- * profile will continue to function.
+ * Remove the instance profile with the provided ARN. Existing clusters with this instance profile
+ * will continue to function.
*
* This API is only accessible to admin users.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Language.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Language.java
index 9dd89c2c3..e8b677236 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Language.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Language.java
@@ -10,6 +10,9 @@ public enum Language {
@JsonProperty("python")
PYTHON,
+ @JsonProperty("r")
+ R,
+
@JsonProperty("scala")
SCALA,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
index a65b5f6b0..e1495320b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
@@ -42,9 +42,7 @@ public LibrariesAPI(LibrariesService mock) {
}
/**
- * Get all statuses.
- *
- * Get the status of all libraries on all clusters. A status is returned for all libraries
+ * Get the status of all libraries on all clusters. A status is returned for all libraries
* installed on this cluster via the API or the libraries UI.
*/
public Iterable
- * Get the status of libraries on a cluster. A status is returned for all libraries installed
- * on this cluster via the API or the libraries UI. The order of returned libraries is as follows:
- * 1. Libraries set to be installed on this cluster, in the order that the libraries were added to
+ * Get the status of libraries on a cluster. A status is returned for all libraries installed on
+ * this cluster via the API or the libraries UI. The order of returned libraries is as follows: 1.
+ * Libraries set to be installed on this cluster, in the order that the libraries were added to
* the cluster, are returned first. 2. Libraries that were previously requested to be installed on
* this cluster or, but are now marked for removal, in no particular order, are returned last.
*/
@@ -78,9 +74,7 @@ public void install(String clusterId, Collection
- * Add libraries to install on a cluster. The installation is asynchronous; it happens in the
+ * Add libraries to install on a cluster. The installation is asynchronous; it happens in the
* background after the completion of this request.
*/
public void install(InstallLibraries request) {
@@ -92,11 +86,8 @@ public void uninstall(String clusterId, Collection
- * Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the
- * cluster is restarted. A request to uninstall a library that is not currently installed is
- * ignored.
+ * Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster
+ * is restarted. A request to uninstall a library that is not currently installed is ignored.
*/
public void uninstall(UninstallLibraries request) {
impl.uninstall(request);
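A sketch of the install/status/uninstall flow described above follows; it is not part of this diff. The libraries() accessor and the Library and PythonPyPiLibrary setters are assumed generated names; the (clusterId, Collection<Library>) convenience overloads and an Iterable-returning allClusterStatuses() match the truncated signatures visible in this hunk but are otherwise assumptions.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.ClusterLibraryStatuses;
import com.databricks.sdk.service.compute.Library;
import com.databricks.sdk.service.compute.PythonPyPiLibrary;
import java.util.Collections;
import java.util.List;

public class LibrariesExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String clusterId = "0123-456789-abcdefgh"; // placeholder: an existing cluster

    List<Library> libs =
        Collections.singletonList(
            new Library().setPypi(new PythonPyPiLibrary().setPackage("simplejson")));

    // Installation is asynchronous; it happens in the background after this request returns.
    w.libraries().install(clusterId, libs);

    // Get the status of all libraries on all clusters.
    for (ClusterLibraryStatuses statuses : w.libraries().allClusterStatuses()) {
      System.out.println(statuses.getClusterId());
    }

    // Libraries are only removed when the cluster restarts; unknown libraries are ignored.
    w.libraries().uninstall(clusterId, libs);
  }
}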
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
index 58e5a51fd..5ccaf55cb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
@@ -27,38 +27,29 @@
@Generated
public interface LibrariesService {
/**
- * Get all statuses.
- *
- * Get the status of all libraries on all clusters. A status is returned for all libraries
+ * Get the status of all libraries on all clusters. A status is returned for all libraries
* installed on this cluster via the API or the libraries UI.
*/
ListAllClusterLibraryStatusesResponse allClusterStatuses();
/**
- * Get status.
- *
- * Get the status of libraries on a cluster. A status is returned for all libraries installed
- * on this cluster via the API or the libraries UI. The order of returned libraries is as follows:
- * 1. Libraries set to be installed on this cluster, in the order that the libraries were added to
+ * Get the status of libraries on a cluster. A status is returned for all libraries installed on
+ * this cluster via the API or the libraries UI. The order of returned libraries is as follows: 1.
+ * Libraries set to be installed on this cluster, in the order that the libraries were added to
* the cluster, are returned first. 2. Libraries that were previously requested to be installed on
* this cluster or, but are now marked for removal, in no particular order, are returned last.
*/
ClusterLibraryStatuses clusterStatus(ClusterStatus clusterStatus);
/**
- * Add a library.
- *
- * Add libraries to install on a cluster. The installation is asynchronous; it happens in the
+ * Add libraries to install on a cluster. The installation is asynchronous; it happens in the
* background after the completion of this request.
*/
void install(InstallLibraries installLibraries);
/**
- * Uninstall libraries.
- *
- * Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the
- * cluster is restarted. A request to uninstall a library that is not currently installed is
- * ignored.
+ * Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster
+ * is restarted. A request to uninstall a library that is not currently installed is ignored.
*/
void uninstall(UninstallLibraries uninstallLibraries);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequest.java
index 1347905b7..33e7f38f8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterCompliancesRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List cluster policy compliance */
@Generated
public class ListClusterCompliancesRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java
index fa8edb3dc..ee797f3c2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List cluster policies */
@Generated
public class ListClusterPoliciesRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequest.java
index 7139f7763..519c5f0de 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List clusters */
@Generated
public class ListClustersRequest {
/** Filters to apply to the list of clusters. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequest.java
index f267ac127..b26f40156 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListPolicyFamiliesRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List policy families */
@Generated
public class ListPolicyFamiliesRequest {
/** Maximum number of policy families to return. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
index 7af090378..565c943ee 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
@@ -40,9 +40,7 @@ public EnforceClusterComplianceResponse enforceCompliance(String clusterId) {
}
/**
- * Enforce cluster policy compliance.
- *
- * Updates a cluster to be compliant with the current version of its policy. A cluster can be
+ * Updates a cluster to be compliant with the current version of its policy. A cluster can be
* updated if it is in a `RUNNING` or `TERMINATED` state.
*
* If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new
@@ -64,10 +62,8 @@ public GetClusterComplianceResponse getCompliance(String clusterId) {
}
/**
- * Get cluster policy compliance.
- *
- * Returns the policy compliance status of a cluster. Clusters could be out of compliance if
- * their policy was updated after the cluster was last edited.
+ * Returns the policy compliance status of a cluster. Clusters could be out of compliance if their
+ * policy was updated after the cluster was last edited.
*/
public GetClusterComplianceResponse getCompliance(GetClusterComplianceRequest request) {
return impl.getCompliance(request);
@@ -78,10 +74,8 @@ public Iterable
- * Returns the policy compliance status of all clusters that use a given policy. Clusters could
- * be out of compliance if their policy was updated after the cluster was last edited.
+ * Returns the policy compliance status of all clusters that use a given policy. Clusters could be
+ * out of compliance if their policy was updated after the cluster was last edited.
*/
public Iterable
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java
- * Updates a cluster to be compliant with the current version of its policy. A cluster can be
+ * Updates a cluster to be compliant with the current version of its policy. A cluster can be
* updated if it is in a `RUNNING` or `TERMINATED` state.
*
* If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new
@@ -40,19 +38,15 @@ EnforceClusterComplianceResponse enforceCompliance(
EnforceClusterComplianceRequest enforceClusterComplianceRequest);
/**
- * Get cluster policy compliance.
- *
- * Returns the policy compliance status of a cluster. Clusters could be out of compliance if
- * their policy was updated after the cluster was last edited.
+ * Returns the policy compliance status of a cluster. Clusters could be out of compliance if their
+ * policy was updated after the cluster was last edited.
*/
GetClusterComplianceResponse getCompliance(
GetClusterComplianceRequest getClusterComplianceRequest);
/**
- * List cluster policy compliance.
- *
- * Returns the policy compliance status of all clusters that use a given policy. Clusters could
- * be out of compliance if their policy was updated after the cluster was last edited.
+ * Returns the policy compliance status of all clusters that use a given policy. Clusters could be
+ * out of compliance if their policy was updated after the cluster was last edited.
*/
ListClusterCompliancesResponse listCompliance(
ListClusterCompliancesRequest listClusterCompliancesRequest);
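A hedged sketch of the compliance workflow described above; the policyComplianceForClusters() accessor name and the cluster ID are assumptions:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.EnforceClusterComplianceResponse;
import com.databricks.sdk.service.compute.GetClusterComplianceResponse;

public class ClusterPolicyComplianceExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String clusterId = "1234-567890-abcde123"; // hypothetical cluster ID

    // Check whether the cluster still complies with the current version of its policy.
    GetClusterComplianceResponse status = w.policyComplianceForClusters().getCompliance(clusterId);

    // Enforce compliance; a RUNNING cluster is restarted so the updated attributes take effect.
    EnforceClusterComplianceResponse result =
        w.policyComplianceForClusters().enforceCompliance(clusterId);

    System.out.println(status + " -> " + result);
  }
}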
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesAPI.java
index d31df6eaf..a6c49c7fb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesAPI.java
@@ -38,20 +38,14 @@ public PolicyFamily get(String policyFamilyId) {
return get(new GetPolicyFamilyRequest().setPolicyFamilyId(policyFamilyId));
}
- /**
- * Get policy family information.
- *
- * Retrieve the information for an policy family based on its identifier and version
- */
+ /** Retrieve the information for a policy family based on its identifier and version */
public PolicyFamily get(GetPolicyFamilyRequest request) {
return impl.get(request);
}
/**
- * List policy families.
- *
- * Returns the list of policy definition types available to use at their latest version. This
- * API is paginated.
+ * Returns the list of policy definition types available to use at their latest version. This API
+ * is paginated.
*/
public Iterable
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesService.java
- * Retrieve the information for an policy family based on its identifier and version
- */
+ /** Retrieve the information for a policy family based on its identifier and version */
PolicyFamily get(GetPolicyFamilyRequest getPolicyFamilyRequest);
/**
- * List policy families.
- *
- * Returns the list of policy definition types available to use at their latest version. This
- * API is paginated.
+ * Returns the list of policy definition types available to use at their latest version. This API
+ * is paginated.
*/
ListPolicyFamiliesResponse list(ListPolicyFamiliesRequest listPolicyFamiliesRequest);
}
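A minimal sketch of iterating the paginated policy-family listing; the policyFamilies() accessor name is an assumption, and the returned Iterable is assumed to page transparently:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.ListPolicyFamiliesRequest;
import com.databricks.sdk.service.compute.PolicyFamily;

public class PolicyFamiliesExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // The list API is paginated; iterating the result walks every page.
    for (PolicyFamily family : w.policyFamilies().list(new ListPolicyFamiliesRequest())) {
      System.out.println(family);
    }
  }
}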
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java
index 151d44359..9471ef127 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java
@@ -207,6 +207,13 @@ public class UpdateClusterResource {
@JsonProperty("policy_id")
private String policyId;
+ /**
+ * If set, the configurable throughput (in Mb/s) for the remote disk. Currently only
+ * supported for GCP HYPERDISK_BALANCED disks.
+ */
+ @JsonProperty("remote_disk_throughput")
+ private Long remoteDiskThroughput;
+
/**
* Determines the cluster's runtime engine, either standard or Photon.
*
@@ -262,6 +269,13 @@ public class UpdateClusterResource {
@JsonProperty("ssh_public_keys")
private Collection
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
- * Create new message in a [conversation](:method:genie/startconversation). The AI response
- * uses all previously created messages in the conversation to respond.
+ * Create new message in a [conversation](:method:genie/startconversation). The AI response uses
+ * all previously created messages in the conversation to respond.
*/
public Wait
- * Execute the SQL for a message query attachment. Use this API when the query attachment has
+ * Execute the SQL for a message query attachment. Use this API when the query attachment has
* expired and needs to be re-executed.
*/
public GenieGetMessageQueryResultResponse executeMessageAttachmentQuery(
@@ -151,73 +157,12 @@ public GenieGetMessageQueryResultResponse executeMessageQuery(
.setMessageId(messageId));
}
- /**
- * [Deprecated] Execute SQL query in a conversation message.
- *
- * Execute the SQL query in the message.
- */
+ /** Execute the SQL query in the message. */
public GenieGetMessageQueryResultResponse executeMessageQuery(
GenieExecuteMessageQueryRequest request) {
return impl.executeMessageQuery(request);
}
- public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
- String spaceId, String conversationId, String messageId, String attachmentId) {
- return generateDownloadFullQueryResult(
- new GenieGenerateDownloadFullQueryResultRequest()
- .setSpaceId(spaceId)
- .setConversationId(conversationId)
- .setMessageId(messageId)
- .setAttachmentId(attachmentId));
- }
-
- /**
- * Generate full query result download.
- *
- * Initiates a new SQL execution and returns a `download_id` that you can use to track the
- * progress of the download. The query result is stored in an external link and can be retrieved
- * using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API.
- * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
- * `EXTERNAL_LINKS` disposition. See [Execute
- * Statement](:method:statementexecution/executestatement) for more details.
- */
- public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
- GenieGenerateDownloadFullQueryResultRequest request) {
- return impl.generateDownloadFullQueryResult(request);
- }
-
- public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
- String spaceId,
- String conversationId,
- String messageId,
- String attachmentId,
- String downloadId) {
- return getDownloadFullQueryResult(
- new GenieGetDownloadFullQueryResultRequest()
- .setSpaceId(spaceId)
- .setConversationId(conversationId)
- .setMessageId(messageId)
- .setAttachmentId(attachmentId)
- .setDownloadId(downloadId));
- }
-
- /**
- * Get download full query result.
- *
- * After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult)
- * and successfully receiving a `download_id`, use this API to poll the download progress. When
- * the download is complete, the API returns one or more external links to the query result files.
- * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
- * `EXTERNAL_LINKS` disposition. You must not set an Authorization header in download requests.
- * When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant
- * temporary access to data. See [Execute Statement](:method:statementexecution/executestatement)
- * for more details.
- */
- public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
- GenieGetDownloadFullQueryResultRequest request) {
- return impl.getDownloadFullQueryResult(request);
- }
-
public GenieMessage getMessage(String spaceId, String conversationId, String messageId) {
return getMessage(
new GenieGetConversationMessageRequest()
@@ -226,11 +171,7 @@ public GenieMessage getMessage(String spaceId, String conversationId, String mes
.setMessageId(messageId));
}
- /**
- * Get conversation message.
- *
- * Get message from conversation.
- */
+ /** Get message from conversation. */
public GenieMessage getMessage(GenieGetConversationMessageRequest request) {
return impl.getMessage(request);
}
@@ -246,10 +187,8 @@ public GenieGetMessageQueryResultResponse getMessageAttachmentQueryResult(
}
/**
- * Get message attachment SQL query result.
- *
- * Get the result of SQL query if the message has a query attachment. This is only available if
- * a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
+ * Get the result of SQL query if the message has a query attachment. This is only available if a
+ * message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
*/
public GenieGetMessageQueryResultResponse getMessageAttachmentQueryResult(
GenieGetMessageAttachmentQueryResultRequest request) {
@@ -266,10 +205,8 @@ public GenieGetMessageQueryResultResponse getMessageQueryResult(
}
/**
- * [Deprecated] Get conversation message SQL query result.
- *
- * Get the result of SQL query if the message has a query attachment. This is only available if
- * a message has a query attachment and the message status is `EXECUTING_QUERY`.
+ * Get the result of SQL query if the message has a query attachment. This is only available if a
+ * message has a query attachment and the message status is `EXECUTING_QUERY`.
*/
public GenieGetMessageQueryResultResponse getMessageQueryResult(
GenieGetMessageQueryResultRequest request) {
@@ -287,10 +224,8 @@ public GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment(
}
/**
- * [Deprecated] Get conversation message SQL query result.
- *
- * Get the result of SQL query if the message has a query attachment. This is only available if
- * a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
+ * Get the result of SQL query if the message has a query attachment. This is only available if a
+ * message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
*/
public GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment(
GenieGetQueryResultByAttachmentRequest request) {
@@ -301,20 +236,21 @@ public GenieSpace getSpace(String spaceId) {
return getSpace(new GenieGetSpaceRequest().setSpaceId(spaceId));
}
- /**
- * Get Genie Space.
- *
- * Get details of a Genie Space.
- */
+ /** Get details of a Genie Space. */
public GenieSpace getSpace(GenieGetSpaceRequest request) {
return impl.getSpace(request);
}
- /**
- * List Genie spaces.
- *
- * Get list of Genie Spaces.
- */
+ public GenieListConversationsResponse listConversations(String spaceId) {
+ return listConversations(new GenieListConversationsRequest().setSpaceId(spaceId));
+ }
+
+ /** Get a list of conversations in a Genie Space. */
+ public GenieListConversationsResponse listConversations(GenieListConversationsRequest request) {
+ return impl.listConversations(request);
+ }
+
+ /** Get list of Genie Spaces. */
public GenieListSpacesResponse listSpaces(GenieListSpacesRequest request) {
return impl.listSpaces(request);
}
@@ -325,11 +261,7 @@ public Wait
- * Start a new conversation.
- */
+ /** Start a new conversation. */
public Wait
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
- * Create new message in a [conversation](:method:genie/startconversation). The AI response
- * uses all previously created messages in the conversation to respond.
+ * Create new message in a [conversation](:method:genie/startconversation). The AI response uses
+ * all previously created messages in the conversation to respond.
*/
GenieMessage createMessage(
GenieCreateConversationMessageRequest genieCreateConversationMessageRequest);
+ /** Delete a conversation. */
+ void deleteConversation(GenieDeleteConversationRequest genieDeleteConversationRequest);
+
/**
- * Execute message attachment SQL query.
- *
- * Execute the SQL for a message query attachment. Use this API when the query attachment has
+ * Execute the SQL for a message query attachment. Use this API when the query attachment has
* expired and needs to be re-executed.
*/
GenieGetMessageQueryResultResponse executeMessageAttachmentQuery(
GenieExecuteMessageAttachmentQueryRequest genieExecuteMessageAttachmentQueryRequest);
- /**
- * [Deprecated] Execute SQL query in a conversation message.
- *
- * Execute the SQL query in the message.
- */
+ /** Execute the SQL query in the message. */
GenieGetMessageQueryResultResponse executeMessageQuery(
GenieExecuteMessageQueryRequest genieExecuteMessageQueryRequest);
- /**
- * Generate full query result download.
- *
- * Initiates a new SQL execution and returns a `download_id` that you can use to track the
- * progress of the download. The query result is stored in an external link and can be retrieved
- * using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API.
- * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
- * `EXTERNAL_LINKS` disposition. See [Execute
- * Statement](:method:statementexecution/executestatement) for more details.
- */
- GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
- GenieGenerateDownloadFullQueryResultRequest genieGenerateDownloadFullQueryResultRequest);
-
- /**
- * Get download full query result.
- *
- * After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult)
- * and successfully receiving a `download_id`, use this API to poll the download progress. When
- * the download is complete, the API returns one or more external links to the query result files.
- * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
- * `EXTERNAL_LINKS` disposition. You must not set an Authorization header in download requests.
- * When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant
- * temporary access to data. See [Execute Statement](:method:statementexecution/executestatement)
- * for more details.
- */
- GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
- GenieGetDownloadFullQueryResultRequest genieGetDownloadFullQueryResultRequest);
-
- /**
- * Get conversation message.
- *
- * Get message from conversation.
- */
+ /** Get message from conversation. */
GenieMessage getMessage(GenieGetConversationMessageRequest genieGetConversationMessageRequest);
/**
- * Get message attachment SQL query result.
- *
- * Get the result of SQL query if the message has a query attachment. This is only available if
- * a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
+ * Get the result of SQL query if the message has a query attachment. This is only available if a
+ * message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
*/
GenieGetMessageQueryResultResponse getMessageAttachmentQueryResult(
GenieGetMessageAttachmentQueryResultRequest genieGetMessageAttachmentQueryResultRequest);
/**
- * [Deprecated] Get conversation message SQL query result.
- *
- * Get the result of SQL query if the message has a query attachment. This is only available if
- * a message has a query attachment and the message status is `EXECUTING_QUERY`.
+ * Get the result of SQL query if the message has a query attachment. This is only available if a
+ * message has a query attachment and the message status is `EXECUTING_QUERY`.
*/
GenieGetMessageQueryResultResponse getMessageQueryResult(
GenieGetMessageQueryResultRequest genieGetMessageQueryResultRequest);
/**
- * [Deprecated] Get conversation message SQL query result.
- *
- * Get the result of SQL query if the message has a query attachment. This is only available if
- * a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
+ * Get the result of SQL query if the message has a query attachment. This is only available if a
+ * message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
*/
GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment(
GenieGetQueryResultByAttachmentRequest genieGetQueryResultByAttachmentRequest);
- /**
- * Get Genie Space.
- *
- * Get details of a Genie Space.
- */
+ /** Get details of a Genie Space. */
GenieSpace getSpace(GenieGetSpaceRequest genieGetSpaceRequest);
- /**
- * List Genie spaces.
- *
- * Get list of Genie Spaces.
- */
+ /** Get a list of conversations in a Genie Space. */
+ GenieListConversationsResponse listConversations(
+ GenieListConversationsRequest genieListConversationsRequest);
+
+ /** Get list of Genie Spaces. */
GenieListSpacesResponse listSpaces(GenieListSpacesRequest genieListSpacesRequest);
- /**
- * Start conversation.
- *
- * Start a new conversation.
- */
+ /** Start a new conversation. */
GenieStartConversationResponse startConversation(
GenieStartConversationMessageRequest genieStartConversationMessageRequest);
+
+ /** Trash a Genie Space. */
+ void trashSpace(GenieTrashSpaceRequest genieTrashSpaceRequest);
}
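A hedged sketch of the new Genie space operations (listConversations, trashSpace). The genie() accessor, the space ID, and the existence of an API-level trashSpace overload mirroring the service method are assumptions:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieListConversationsResponse;
import com.databricks.sdk.service.dashboards.GenieListSpacesRequest;
import com.databricks.sdk.service.dashboards.GenieListSpacesResponse;
import com.databricks.sdk.service.dashboards.GenieTrashSpaceRequest;

public class GenieSpacesExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // List Genie spaces, then list conversations in one of them.
    GenieListSpacesResponse spaces = w.genie().listSpaces(new GenieListSpacesRequest());
    String spaceId = "01ef0000000000000000000000000000"; // hypothetical space ID
    GenieListConversationsResponse conversations = w.genie().listConversations(spaceId);
    System.out.println(spaces + " " + conversations);

    // Move the space to the trash (assumes a trashSpace method mirroring GenieService).
    w.genie().trashSpace(new GenieTrashSpaceRequest().setSpaceId(spaceId));
  }
}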
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java
new file mode 100755
index 000000000..5b27effad
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieTrashSpaceRequest {
+ /** The ID associated with the Genie space to be trashed. */
+ @JsonIgnore private String spaceId;
+
+ public GenieTrashSpaceRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieTrashSpaceRequest that = (GenieTrashSpaceRequest) o;
+ return Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieTrashSpaceRequest.class).add("spaceId", spaceId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java
index 3c623f7d4..283f0884b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get dashboard */
@Generated
public class GetDashboardRequest {
/** UUID identifying the dashboard. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java
index d35848af6..9b1cd9da0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get published dashboard */
@Generated
public class GetPublishedDashboardRequest {
/** UUID identifying the published dashboard. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java
index 6bba3f0d8..37a2db41f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Read an information of a published dashboard to mint an OAuth token. */
@Generated
public class GetPublishedDashboardTokenInfoRequest {
/** UUID identifying the published dashboard. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java
index 19c45e234..f3d3364ee 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetScheduleRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get dashboard schedule */
@Generated
public class GetScheduleRequest {
/** UUID identifying the dashboard to which the schedule belongs. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java
index 8fb7d6e6f..f924e63f4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetSubscriptionRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get schedule subscription */
@Generated
public class GetSubscriptionRequest {
/** UUID identifying the dashboard to which the subscription belongs. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
index 8ceb4c401..0268f3fd3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
@@ -31,11 +31,7 @@ public Dashboard create(Dashboard dashboard) {
return create(new CreateDashboardRequest().setDashboard(dashboard));
}
- /**
- * Create dashboard.
- *
- * Create a draft dashboard.
- */
+ /** Create a draft dashboard. */
public Dashboard create(CreateDashboardRequest request) {
return impl.create(request);
}
@@ -91,11 +87,7 @@ public Dashboard get(String dashboardId) {
return get(new GetDashboardRequest().setDashboardId(dashboardId));
}
- /**
- * Get dashboard.
- *
- * Get a draft dashboard.
- */
+ /** Get a draft dashboard. */
public Dashboard get(GetDashboardRequest request) {
return impl.get(request);
}
@@ -104,11 +96,7 @@ public PublishedDashboard getPublished(String dashboardId) {
return getPublished(new GetPublishedDashboardRequest().setDashboardId(dashboardId));
}
- /**
- * Get published dashboard.
- *
- * Get the current published dashboard.
- */
+ /** Get the current published dashboard. */
public PublishedDashboard getPublished(GetPublishedDashboardRequest request) {
return impl.getPublished(request);
}
@@ -195,11 +183,7 @@ public Dashboard migrate(String sourceDashboardId) {
return migrate(new MigrateDashboardRequest().setSourceDashboardId(sourceDashboardId));
}
- /**
- * Migrate dashboard.
- *
- * Migrates a classic SQL dashboard to Lakeview.
- */
+ /** Migrates a classic SQL dashboard to Lakeview. */
public Dashboard migrate(MigrateDashboardRequest request) {
return impl.migrate(request);
}
@@ -208,11 +192,7 @@ public PublishedDashboard publish(String dashboardId) {
return publish(new PublishRequest().setDashboardId(dashboardId));
}
- /**
- * Publish dashboard.
- *
- * Publish the current draft dashboard.
- */
+ /** Publish the current draft dashboard. */
public PublishedDashboard publish(PublishRequest request) {
return impl.publish(request);
}
@@ -221,11 +201,7 @@ public void trash(String dashboardId) {
trash(new TrashDashboardRequest().setDashboardId(dashboardId));
}
- /**
- * Trash dashboard.
- *
- * Trash a dashboard.
- */
+ /** Trash a dashboard. */
public void trash(TrashDashboardRequest request) {
impl.trash(request);
}
@@ -234,11 +210,7 @@ public void unpublish(String dashboardId) {
unpublish(new UnpublishDashboardRequest().setDashboardId(dashboardId));
}
- /**
- * Unpublish dashboard.
- *
- * Unpublish the dashboard.
- */
+ /** Unpublish the dashboard. */
public void unpublish(UnpublishDashboardRequest request) {
impl.unpublish(request);
}
@@ -247,11 +219,7 @@ public Dashboard update(String dashboardId, Dashboard dashboard) {
return update(new UpdateDashboardRequest().setDashboardId(dashboardId).setDashboard(dashboard));
}
- /**
- * Update dashboard.
- *
- * Update a draft dashboard.
- */
+ /** Update a draft dashboard. */
public Dashboard update(UpdateDashboardRequest request) {
return impl.update(request);
}
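A minimal sketch of the dashboard lifecycle calls shown above, using the ID-based convenience overloads visible in this diff; the lakeview() accessor name and the dashboard UUID are assumptions:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.PublishedDashboard;

public class LakeviewExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String dashboardId = "01ef0000000000000000000000000000"; // hypothetical dashboard UUID

    // Publish the current draft, read back the published version, then trash the dashboard.
    PublishedDashboard published = w.lakeview().publish(dashboardId);
    PublishedDashboard current = w.lakeview().getPublished(dashboardId);
    System.out.println(published + " " + current);
    w.lakeview().trash(dashboardId);
  }
}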
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
index eb5fda3a4..f82cf8c5b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
@@ -29,9 +29,7 @@ public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(Str
}
/**
- * Read an information of a published dashboard to mint an OAuth token.
- *
- * Get a required authorization details and scopes of a published dashboard to mint an OAuth
+ * Get the required authorization details and scopes of a published dashboard to mint an OAuth
* token. The `authorization_details` can be enriched to apply additional restrictions.
*
* Example: Adding the following `authorization_details` object to downscope the viewer
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
index cad465780..02e295012 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
@@ -13,9 +13,7 @@
@Generated
public interface LakeviewEmbeddedService {
/**
- * Read an information of a published dashboard to mint an OAuth token.
- *
- * Get a required authorization details and scopes of a published dashboard to mint an OAuth
+ * Get the required authorization details and scopes of a published dashboard to mint an OAuth
* token. The `authorization_details` can be enriched to apply additional restrictions.
*
* Example: Adding the following `authorization_details` object to downscope the viewer
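A hedged sketch of fetching the token info described above; the lakeviewEmbedded() accessor name and the dashboard UUID are assumptions:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GetPublishedDashboardTokenInfoResponse;

public class EmbeddedTokenInfoExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String dashboardId = "01ef0000000000000000000000000000"; // hypothetical dashboard UUID

    // Fetch the authorization details and scopes needed to mint a downscoped OAuth token.
    GetPublishedDashboardTokenInfoResponse info =
        w.lakeviewEmbedded().getPublishedDashboardTokenInfo(dashboardId);
    System.out.println(info);
  }
}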
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java
index 66187e358..9baa2f5e0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java
@@ -13,11 +13,7 @@
*/
@Generated
public interface LakeviewService {
- /**
- * Create dashboard.
- *
- * Create a draft dashboard.
- */
+ /** Create a draft dashboard. */
Dashboard create(CreateDashboardRequest createDashboardRequest);
/** Create dashboard schedule. */
@@ -32,18 +28,10 @@ public interface LakeviewService {
/** Delete schedule subscription. */
void deleteSubscription(DeleteSubscriptionRequest deleteSubscriptionRequest);
- /**
- * Get dashboard.
- *
- * Get a draft dashboard.
- */
+ /** Get a draft dashboard. */
Dashboard get(GetDashboardRequest getDashboardRequest);
- /**
- * Get published dashboard.
- *
- * Get the current published dashboard.
- */
+ /** Get the current published dashboard. */
PublishedDashboard getPublished(GetPublishedDashboardRequest getPublishedDashboardRequest);
/** Get dashboard schedule. */
@@ -61,39 +49,19 @@ public interface LakeviewService {
/** List schedule subscriptions. */
ListSubscriptionsResponse listSubscriptions(ListSubscriptionsRequest listSubscriptionsRequest);
- /**
- * Migrate dashboard.
- *
- * Migrates a classic SQL dashboard to Lakeview.
- */
+ /** Migrates a classic SQL dashboard to Lakeview. */
Dashboard migrate(MigrateDashboardRequest migrateDashboardRequest);
- /**
- * Publish dashboard.
- *
- * Publish the current draft dashboard.
- */
+ /** Publish the current draft dashboard. */
PublishedDashboard publish(PublishRequest publishRequest);
- /**
- * Trash dashboard.
- *
- * Trash a dashboard.
- */
+ /** Trash a dashboard. */
void trash(TrashDashboardRequest trashDashboardRequest);
- /**
- * Unpublish dashboard.
- *
- * Unpublish the dashboard.
- */
+ /** Unpublish the dashboard. */
void unpublish(UnpublishDashboardRequest unpublishDashboardRequest);
- /**
- * Update dashboard.
- *
- * Update a draft dashboard.
- */
+ /** Update a draft dashboard. */
Dashboard update(UpdateDashboardRequest updateDashboardRequest);
/** Update dashboard schedule. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequest.java
index 2a7a7d8ec..27e018055 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListDashboardsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List dashboards */
@Generated
public class ListDashboardsRequest {
/** The number of dashboards to return per page. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequest.java
index 9ba29e91a..5382151b8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSchedulesRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List dashboard schedules */
@Generated
public class ListSchedulesRequest {
/** UUID identifying the dashboard to which the schedules belong. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequest.java
index 275fa17db..d38d1dc36 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ListSubscriptionsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List schedule subscriptions */
@Generated
public class ListSubscriptionsRequest {
/** UUID identifying the dashboard to which the subscriptions belong. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java
index 901212478..51f66ade4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Trash dashboard */
@Generated
public class TrashDashboardRequest {
/** UUID identifying the dashboard. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashSpaceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashSpaceResponse.java
new file mode 100755
index 000000000..4203305e7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashSpaceResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class TrashSpaceResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(TrashSpaceResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java
index cb8e8e7c4..5277402f6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Unpublish dashboard */
@Generated
public class UnpublishDashboardRequest {
/** UUID identifying the published dashboard. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java
index 84298ffd8..acdb87fe8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Update dashboard */
@Generated
public class UpdateDashboardRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
index cffdc6370..f3faa860d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Update dashboard schedule */
@Generated
public class UpdateScheduleRequest {
/** UUID identifying the dashboard to which the schedule belongs. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java
index a05d27a4a..8a28a5ada 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create a Database Catalog */
@Generated
public class CreateDatabaseCatalogRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
index d7da58737..73af31b6c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create a Database Instance */
@Generated
public class CreateDatabaseInstanceRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java
new file mode 100755
index 000000000..28c177a98
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateDatabaseInstanceRoleRequest {
+ /** A DatabaseInstanceRole represents a Postgres role in a database instance. */
+ @JsonProperty("database_instance_role")
+ private DatabaseInstanceRole databaseInstanceRole;
+
+ /** */
+ @JsonIgnore private String instanceName;
+
+ public CreateDatabaseInstanceRoleRequest setDatabaseInstanceRole(
+ DatabaseInstanceRole databaseInstanceRole) {
+ this.databaseInstanceRole = databaseInstanceRole;
+ return this;
+ }
+
+ public DatabaseInstanceRole getDatabaseInstanceRole() {
+ return databaseInstanceRole;
+ }
+
+ public CreateDatabaseInstanceRoleRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDatabaseInstanceRoleRequest that = (CreateDatabaseInstanceRoleRequest) o;
+ return Objects.equals(databaseInstanceRole, that.databaseInstanceRole)
+ && Objects.equals(instanceName, that.instanceName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseInstanceRole, instanceName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDatabaseInstanceRoleRequest.class)
+ .add("databaseInstanceRole", databaseInstanceRole)
+ .add("instanceName", instanceName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java
index b3eef0fb3..bfadcb9fc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create a Database Table */
@Generated
public class CreateDatabaseTableRequest {
/** Next field marker: 13 */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
index 02070ad39..7dc427c0d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Create a Synced Database Table */
@Generated
public class CreateSyncedDatabaseTableRequest {
/** Next field marker: 12 */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
index 1d452de83..c7baf5d89 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
@@ -43,11 +43,28 @@ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest req
return impl.createDatabaseInstance(request);
}
+ public DatabaseInstanceRole createDatabaseInstanceRole(
+ String instanceName, DatabaseInstanceRole databaseInstanceRole) {
+ return createDatabaseInstanceRole(
+ new CreateDatabaseInstanceRoleRequest()
+ .setInstanceName(instanceName)
+ .setDatabaseInstanceRole(databaseInstanceRole));
+ }
+
+ /** Create a role for a Database Instance. */
+ public DatabaseInstanceRole createDatabaseInstanceRole(
+ CreateDatabaseInstanceRoleRequest request) {
+ return impl.createDatabaseInstanceRole(request);
+ }
+
public DatabaseTable createDatabaseTable(DatabaseTable table) {
return createDatabaseTable(new CreateDatabaseTableRequest().setTable(table));
}
- /** Create a Database Table. */
+ /**
+ * Create a Database Table. Useful for registering pre-existing PG tables in UC. See
+ * CreateSyncedDatabaseTable for creating synced tables in PG from a source table in UC.
+ */
public DatabaseTable createDatabaseTable(CreateDatabaseTableRequest request) {
return impl.createDatabaseTable(request);
}
@@ -80,6 +97,16 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) {
impl.deleteDatabaseInstance(request);
}
+ public void deleteDatabaseInstanceRole(String instanceName, String name) {
+ deleteDatabaseInstanceRole(
+ new DeleteDatabaseInstanceRoleRequest().setInstanceName(instanceName).setName(name));
+ }
+
+ /** Deletes a role for a Database Instance. */
+ public void deleteDatabaseInstanceRole(DeleteDatabaseInstanceRoleRequest request) {
+ impl.deleteDatabaseInstanceRole(request);
+ }
+
public void deleteDatabaseTable(String name) {
deleteDatabaseTable(new DeleteDatabaseTableRequest().setName(name));
}
@@ -126,6 +153,16 @@ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request)
return impl.getDatabaseInstance(request);
}
+ public DatabaseInstanceRole getDatabaseInstanceRole(String instanceName, String name) {
+ return getDatabaseInstanceRole(
+ new GetDatabaseInstanceRoleRequest().setInstanceName(instanceName).setName(name));
+ }
+
+ /** Gets a role for a Database Instance. */
+ public DatabaseInstanceRole getDatabaseInstanceRole(GetDatabaseInstanceRoleRequest request) {
+ return impl.getDatabaseInstanceRole(request);
+ }
+
public DatabaseTable getDatabaseTable(String name) {
return getDatabaseTable(new GetDatabaseTableRequest().setName(name));
}
@@ -144,6 +181,27 @@ public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest
return impl.getSyncedDatabaseTable(request);
}
+ public Iterable
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRef.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRef.java
+ * Input: For specifying the WAL LSN to create a child instance. Optional. Output: Only
+ * populated if provided as input to create a child instance.
+ */
+ @JsonProperty("lsn")
+ private String lsn;
+
+ /** Name of the ref database instance. */
+ @JsonProperty("name")
+ private String name;
+
+ /** Id of the ref database instance. */
+ @JsonProperty("uid")
+ private String uid;
+
+ public DatabaseInstanceRef setBranchTime(String branchTime) {
+ this.branchTime = branchTime;
+ return this;
+ }
+
+ public String getBranchTime() {
+ return branchTime;
+ }
+
+ public DatabaseInstanceRef setEffectiveLsn(String effectiveLsn) {
+ this.effectiveLsn = effectiveLsn;
+ return this;
+ }
+
+ public String getEffectiveLsn() {
+ return effectiveLsn;
+ }
+
+ public DatabaseInstanceRef setLsn(String lsn) {
+ this.lsn = lsn;
+ return this;
+ }
+
+ public String getLsn() {
+ return lsn;
+ }
+
+ public DatabaseInstanceRef setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DatabaseInstanceRef setUid(String uid) {
+ this.uid = uid;
+ return this;
+ }
+
+ public String getUid() {
+ return uid;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseInstanceRef that = (DatabaseInstanceRef) o;
+ return Objects.equals(branchTime, that.branchTime)
+ && Objects.equals(effectiveLsn, that.effectiveLsn)
+ && Objects.equals(lsn, that.lsn)
+ && Objects.equals(name, that.name)
+ && Objects.equals(uid, that.uid);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branchTime, effectiveLsn, lsn, name, uid);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseInstanceRef.class)
+ .add("branchTime", branchTime)
+ .add("effectiveLsn", effectiveLsn)
+ .add("lsn", lsn)
+ .add("name", name)
+ .add("uid", uid)
+ .toString();
+ }
+}
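A minimal sketch of constructing the new DatabaseInstanceRef with a WAL LSN. How the ref is attached to an instance-creation request is outside this hunk, so only the object construction is shown; the instance name and LSN value are hypothetical:

import com.databricks.sdk.service.database.DatabaseInstanceRef;

public class DatabaseInstanceRefExample {
  public static void main(String[] args) {
    // Reference a parent instance at a specific WAL position when creating a child instance.
    DatabaseInstanceRef parentRef =
        new DatabaseInstanceRef()
            .setName("my-parent-instance") // hypothetical instance name
            .setLsn("0/16B3748"); // hypothetical WAL LSN
    System.out.println(parentRef);
  }
}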
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java
new file mode 100755
index 000000000..cc9b99b4c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRole.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** A DatabaseInstanceRole represents a Postgres role in a database instance. */
+@Generated
+public class DatabaseInstanceRole {
+ /** API-exposed Postgres role attributes */
+ @JsonProperty("attributes")
+ private DatabaseInstanceRoleAttributes attributes;
+
+ /** The type of the role. */
+ @JsonProperty("identity_type")
+ private DatabaseInstanceRoleIdentityType identityType;
+
+ /** An enum value for a standard role that this role is a member of. */
+ @JsonProperty("membership_role")
+ private DatabaseInstanceRoleMembershipRole membershipRole;
+
+ /** The name of the role. This is the unique identifier for the role in an instance. */
+ @JsonProperty("name")
+ private String name;
+
+ public DatabaseInstanceRole setAttributes(DatabaseInstanceRoleAttributes attributes) {
+ this.attributes = attributes;
+ return this;
+ }
+
+ public DatabaseInstanceRoleAttributes getAttributes() {
+ return attributes;
+ }
+
+ public DatabaseInstanceRole setIdentityType(DatabaseInstanceRoleIdentityType identityType) {
+ this.identityType = identityType;
+ return this;
+ }
+
+ public DatabaseInstanceRoleIdentityType getIdentityType() {
+ return identityType;
+ }
+
+ public DatabaseInstanceRole setMembershipRole(DatabaseInstanceRoleMembershipRole membershipRole) {
+ this.membershipRole = membershipRole;
+ return this;
+ }
+
+ public DatabaseInstanceRoleMembershipRole getMembershipRole() {
+ return membershipRole;
+ }
+
+ public DatabaseInstanceRole setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseInstanceRole that = (DatabaseInstanceRole) o;
+ return Objects.equals(attributes, that.attributes)
+ && Objects.equals(identityType, that.identityType)
+ && Objects.equals(membershipRole, that.membershipRole)
+ && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(attributes, identityType, membershipRole, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseInstanceRole.class)
+ .add("attributes", attributes)
+ .add("identityType", identityType)
+ .add("membershipRole", membershipRole)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleAttributes.java
new file mode 100755
index 000000000..6e94e970c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleAttributes.java
@@ -0,0 +1,81 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Attributes that can be granted to a Postgres role. Only a subset is implemented for now; see
+ * https://www.postgresql.org/docs/16/sql-createrole.html. The values follow Postgres keyword
+ * naming (e.g. CREATEDB, BYPASSRLS), which is why they don't include the usual underscores between
+ * words. This is kept as a nested object/struct representation since these are knobs from an
+ * external spec.
+ */
+@Generated
+public class DatabaseInstanceRoleAttributes {
+ /** */
+ @JsonProperty("bypassrls")
+ private Boolean bypassrls;
+
+ /** */
+ @JsonProperty("createdb")
+ private Boolean createdb;
+
+ /** */
+ @JsonProperty("createrole")
+ private Boolean createrole;
+
+ public DatabaseInstanceRoleAttributes setBypassrls(Boolean bypassrls) {
+ this.bypassrls = bypassrls;
+ return this;
+ }
+
+ public Boolean getBypassrls() {
+ return bypassrls;
+ }
+
+ public DatabaseInstanceRoleAttributes setCreatedb(Boolean createdb) {
+ this.createdb = createdb;
+ return this;
+ }
+
+ public Boolean getCreatedb() {
+ return createdb;
+ }
+
+ public DatabaseInstanceRoleAttributes setCreaterole(Boolean createrole) {
+ this.createrole = createrole;
+ return this;
+ }
+
+ public Boolean getCreaterole() {
+ return createrole;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseInstanceRoleAttributes that = (DatabaseInstanceRoleAttributes) o;
+ return Objects.equals(bypassrls, that.bypassrls)
+ && Objects.equals(createdb, that.createdb)
+ && Objects.equals(createrole, that.createrole);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(bypassrls, createdb, createrole);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseInstanceRoleAttributes.class)
+ .add("bypassrls", bypassrls)
+ .add("createdb", createdb)
+ .add("createrole", createrole)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleIdentityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleIdentityType.java
new file mode 100755
index 000000000..23e1ccdc7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleIdentityType.java
@@ -0,0 +1,13 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum DatabaseInstanceRoleIdentityType {
+ GROUP,
+ PG_ONLY,
+ SERVICE_PRINCIPAL,
+ USER,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleMembershipRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleMembershipRole.java
new file mode 100755
index 000000000..970f66ee8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceRoleMembershipRole.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+
+/** Roles that the DatabaseInstanceRole can be a member of. */
+@Generated
+public enum DatabaseInstanceRoleMembershipRole {
+ DATABRICKS_SUPERUSER,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
index 09dcbff3f..8f67c97ff 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
@@ -19,7 +19,14 @@ public interface DatabaseService {
DatabaseInstance createDatabaseInstance(
CreateDatabaseInstanceRequest createDatabaseInstanceRequest);
- /** Create a Database Table. */
+ /** Create a role for a Database Instance. */
+ DatabaseInstanceRole createDatabaseInstanceRole(
+ CreateDatabaseInstanceRoleRequest createDatabaseInstanceRoleRequest);
+
+ /**
+ * Create a Database Table. Useful for registering pre-existing PG tables in UC. See
+ * CreateSyncedDatabaseTable for creating synced tables in PG from a source table in UC.
+ */
DatabaseTable createDatabaseTable(CreateDatabaseTableRequest createDatabaseTableRequest);
/** Create a Synced Database Table. */
@@ -32,6 +39,10 @@ SyncedDatabaseTable createSyncedDatabaseTable(
/** Delete a Database Instance. */
void deleteDatabaseInstance(DeleteDatabaseInstanceRequest deleteDatabaseInstanceRequest);
+ /** Deletes a role for a Database Instance. */
+ void deleteDatabaseInstanceRole(
+ DeleteDatabaseInstanceRoleRequest deleteDatabaseInstanceRoleRequest);
+
/** Delete a Database Table. */
void deleteDatabaseTable(DeleteDatabaseTableRequest deleteDatabaseTableRequest);
@@ -52,6 +63,10 @@ DatabaseCredential generateDatabaseCredential(
/** Get a Database Instance. */
DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest getDatabaseInstanceRequest);
+ /** Gets a role for a Database Instance. */
+ DatabaseInstanceRole getDatabaseInstanceRole(
+ GetDatabaseInstanceRoleRequest getDatabaseInstanceRoleRequest);
+
/** Get a Database Table. */
DatabaseTable getDatabaseTable(GetDatabaseTableRequest getDatabaseTableRequest);
@@ -59,6 +74,10 @@ DatabaseCredential generateDatabaseCredential(
SyncedDatabaseTable getSyncedDatabaseTable(
GetSyncedDatabaseTableRequest getSyncedDatabaseTableRequest);
+ /** List roles for a Database Instance. */
+ ListDatabaseInstanceRolesResponse listDatabaseInstanceRoles(
+ ListDatabaseInstanceRolesRequest listDatabaseInstanceRolesRequest);
+
/** List Database Instances. */
ListDatabaseInstancesResponse listDatabaseInstances(
ListDatabaseInstancesRequest listDatabaseInstancesRequest);
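A rough usage sketch of the new role read methods declared above. Only the interface method signatures come from this diff; the instance name and role name are placeholders, and obtaining a `DatabaseService` (normally via the SDK's `WorkspaceClient`) is outside this file.

```java
import com.databricks.sdk.service.database.DatabaseInstanceRole;
import com.databricks.sdk.service.database.DatabaseService;
import com.databricks.sdk.service.database.GetDatabaseInstanceRoleRequest;
import com.databricks.sdk.service.database.ListDatabaseInstanceRolesRequest;
import com.databricks.sdk.service.database.ListDatabaseInstanceRolesResponse;

public class DatabaseRolesSketch {
  static void inspectRoles(DatabaseService db, String instanceName) {
    // List roles defined on a Database Instance.
    ListDatabaseInstanceRolesResponse roles =
        db.listDatabaseInstanceRoles(
            new ListDatabaseInstanceRolesRequest().setInstanceName(instanceName));
    System.out.println(roles);

    // Fetch a single role by name ("reporting_reader" is a placeholder).
    DatabaseInstanceRole role =
        db.getDatabaseInstanceRole(
            new GetDatabaseInstanceRoleRequest()
                .setInstanceName(instanceName)
                .setName("reporting_reader"));
    System.out.println(role);
  }
}
```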
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
index 5018d7b59..8bae3d07a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
@@ -20,18 +20,15 @@ public class DatabaseTable {
private String databaseInstanceName;
/**
- * Target Postgres database object (logical database) name for this table. This field is optional
- * in all scenarios.
+ * Target Postgres database object (logical database) name for this table.
*
* When creating a table in a registered Postgres catalog, the target Postgres database name is
* inferred to be that of the registered catalog. If this field is specified in this scenario, the
* Postgres database name MUST match that of the registered catalog (or the request will be
* rejected).
*
- * When creating a table in a standard catalog, the target database name is inferred to be that
- * of the standard catalog. In this scenario, specifying this field will allow targeting an
- * arbitrary postgres database. Note that this has implications for the
- * `create_database_objects_is_missing` field in `spec`.
+ * When creating a table in a standard catalog, this field is required. In this scenario,
+ * specifying this field will allow targeting an arbitrary postgres database.
*/
@JsonProperty("logical_database_name")
private String logicalDatabaseName;
@@ -40,10 +37,6 @@ public class DatabaseTable {
@JsonProperty("name")
private String name;
- /** Data serving REST API URL for this table */
- @JsonProperty("table_serving_url")
- private String tableServingUrl;
-
public DatabaseTable setDatabaseInstanceName(String databaseInstanceName) {
this.databaseInstanceName = databaseInstanceName;
return this;
@@ -71,15 +64,6 @@ public String getName() {
return name;
}
- public DatabaseTable setTableServingUrl(String tableServingUrl) {
- this.tableServingUrl = tableServingUrl;
- return this;
- }
-
- public String getTableServingUrl() {
- return tableServingUrl;
- }
-
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -87,13 +71,12 @@ public boolean equals(Object o) {
DatabaseTable that = (DatabaseTable) o;
return Objects.equals(databaseInstanceName, that.databaseInstanceName)
&& Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
- && Objects.equals(name, that.name)
- && Objects.equals(tableServingUrl, that.tableServingUrl);
+ && Objects.equals(name, that.name);
}
@Override
public int hashCode() {
- return Objects.hash(databaseInstanceName, logicalDatabaseName, name, tableServingUrl);
+ return Objects.hash(databaseInstanceName, logicalDatabaseName, name);
}
@Override
@@ -102,7 +85,6 @@ public String toString() {
.add("databaseInstanceName", databaseInstanceName)
.add("logicalDatabaseName", logicalDatabaseName)
.add("name", name)
- .add("tableServingUrl", tableServingUrl)
.toString();
}
}
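To illustrate the requirement above: a small sketch of a DatabaseTable registration for a table in a standard catalog, where `logical_database_name` must be supplied. The three-level name, instance name, and logical database are placeholders, and `setName`/`setLogicalDatabaseName` are assumed to follow the same generated setter pattern as `setDatabaseInstanceName` shown above.

```java
import com.databricks.sdk.service.database.DatabaseTable;

public class DatabaseTableSketch {
  // Registering a pre-existing Postgres table from a standard (non-registered) catalog.
  static DatabaseTable standardCatalogTable() {
    return new DatabaseTable()
        .setName("standard_catalog.app_schema.events") // UC three-level name (placeholder)
        .setDatabaseInstanceName("my-instance")        // placeholder instance name
        .setLogicalDatabaseName("appdb");              // required when the catalog is a standard catalog
  }
}
```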
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java
index 3ac34bf9e..7759d67fc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a Database Catalog */
@Generated
public class DeleteDatabaseCatalogRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java
index 81ed118c6..16048b2eb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a Database Instance */
@Generated
public class DeleteDatabaseInstanceRequest {
/**
@@ -23,10 +22,12 @@ public class DeleteDatabaseInstanceRequest {
@JsonIgnore private String name;
/**
- * If false, the database instance is soft deleted. Soft deleted instances behave as if they are
- * deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted
- * by calling the undelete API for a limited time. If true, the database instance is hard deleted
- * and cannot be undeleted.
+ * Note: purge=false (soft delete) is still in development. If false, the database instance is
+ * soft deleted (implementation pending): it behaves as if deleted, cannot be used for CRUD
+ * operations or connected to, but can be undeleted by calling the undelete API for a limited
+ * time. If true, the database instance is hard deleted and cannot be undeleted. Until soft delete
+ * ships, purge must be set to true to delete an instance.
*/
@JsonIgnore
@QueryParam("purge")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleRequest.java
new file mode 100755
index 000000000..ef3efc5f5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleRequest.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteDatabaseInstanceRoleRequest {
+ /** This is the AIP standard name for the equivalent of Postgres' `IF EXISTS` option */
+ @JsonIgnore
+ @QueryParam("allow_missing")
+ private Boolean allowMissing;
+
+ /** */
+ @JsonIgnore private String instanceName;
+
+ /** */
+ @JsonIgnore private String name;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("reassign_owned_to")
+ private String reassignOwnedTo;
+
+ public DeleteDatabaseInstanceRoleRequest setAllowMissing(Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public DeleteDatabaseInstanceRoleRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ public DeleteDatabaseInstanceRoleRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DeleteDatabaseInstanceRoleRequest setReassignOwnedTo(String reassignOwnedTo) {
+ this.reassignOwnedTo = reassignOwnedTo;
+ return this;
+ }
+
+ public String getReassignOwnedTo() {
+ return reassignOwnedTo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDatabaseInstanceRoleRequest that = (DeleteDatabaseInstanceRoleRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(instanceName, that.instanceName)
+ && Objects.equals(name, that.name)
+ && Objects.equals(reassignOwnedTo, that.reassignOwnedTo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, instanceName, name, reassignOwnedTo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseInstanceRoleRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("instanceName", instanceName)
+ .add("name", name)
+ .add("reassignOwnedTo", reassignOwnedTo)
+ .toString();
+ }
+}
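A sketch of deleting a role with the options above. `allow_missing` mirrors Postgres' `IF EXISTS` as documented; `reassign_owned_to` is undocumented in this diff, and the comment below reflects the usual Postgres REASSIGN OWNED reading, which may not be exact. Role names are placeholders.

```java
import com.databricks.sdk.service.database.DatabaseService;
import com.databricks.sdk.service.database.DeleteDatabaseInstanceRoleRequest;

public class DeleteRoleSketch {
  static void dropRole(DatabaseService db, String instanceName) {
    db.deleteDatabaseInstanceRole(
        new DeleteDatabaseInstanceRoleRequest()
            .setInstanceName(instanceName)
            .setName("reporting_reader")
            .setAllowMissing(true)            // do not fail if the role is already gone
            .setReassignOwnedTo("db_admin")); // presumed: new owner for objects owned by the role
  }
}
```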
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleResponse.java
new file mode 100755
index 000000000..4620e449a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRoleResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class DeleteDatabaseInstanceRoleResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseInstanceRoleResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java
index a372f064a..fa719b7fa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a Database Table */
@Generated
public class DeleteDatabaseTableRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java
index 41d1a388c..43377e486 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a Synced Database Table */
@Generated
public class DeleteSyncedDatabaseTableRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeltaTableSyncInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeltaTableSyncInfo.java
new file mode 100755
index 000000000..d37eca8a5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeltaTableSyncInfo.java
@@ -0,0 +1,62 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DeltaTableSyncInfo {
+ /**
+ * The timestamp when the above Delta version was committed in the source Delta table. Note: This
+ * is the Delta commit time, not the time the data was written to the synced table.
+ */
+ @JsonProperty("delta_commit_timestamp")
+ private String deltaCommitTimestamp;
+
+ /** The Delta Lake commit version that was last successfully synced. */
+ @JsonProperty("delta_commit_version")
+ private Long deltaCommitVersion;
+
+ public DeltaTableSyncInfo setDeltaCommitTimestamp(String deltaCommitTimestamp) {
+ this.deltaCommitTimestamp = deltaCommitTimestamp;
+ return this;
+ }
+
+ public String getDeltaCommitTimestamp() {
+ return deltaCommitTimestamp;
+ }
+
+ public DeltaTableSyncInfo setDeltaCommitVersion(Long deltaCommitVersion) {
+ this.deltaCommitVersion = deltaCommitVersion;
+ return this;
+ }
+
+ public Long getDeltaCommitVersion() {
+ return deltaCommitVersion;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeltaTableSyncInfo that = (DeltaTableSyncInfo) o;
+ return Objects.equals(deltaCommitTimestamp, that.deltaCommitTimestamp)
+ && Objects.equals(deltaCommitVersion, that.deltaCommitVersion);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(deltaCommitTimestamp, deltaCommitVersion);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeltaTableSyncInfo.class)
+ .add("deltaCommitTimestamp", deltaCommitTimestamp)
+ .add("deltaCommitVersion", deltaCommitVersion)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java
index 180eb8971..06210fc7d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Find a Database Instance by uid */
@Generated
public class FindDatabaseInstanceByUidRequest {
/** UID of the cluster to get. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java
index 49550232b..95b33b025 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a Database Catalog */
@Generated
public class GetDatabaseCatalogRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java
index 2aa6924ec..4c66741c9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a Database Instance */
@Generated
public class GetDatabaseInstanceRequest {
/** Name of the cluster to get. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRoleRequest.java
new file mode 100755
index 000000000..7640c29c2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRoleRequest.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetDatabaseInstanceRoleRequest {
+ /** */
+ @JsonIgnore private String instanceName;
+
+ /** */
+ @JsonIgnore private String name;
+
+ public GetDatabaseInstanceRoleRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ public GetDatabaseInstanceRoleRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDatabaseInstanceRoleRequest that = (GetDatabaseInstanceRoleRequest) o;
+ return Objects.equals(instanceName, that.instanceName) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(instanceName, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDatabaseInstanceRoleRequest.class)
+ .add("instanceName", instanceName)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java
index 81c25b174..b2c0c89db 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a Database Table */
@Generated
public class GetDatabaseTableRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java
index d15853061..045da6891 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a Synced Database Table */
@Generated
public class GetSyncedDatabaseTableRequest {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesRequest.java
new file mode 100755
index 000000000..d3dab4f4c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesRequest.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseInstanceRolesRequest {
+ /** */
+ @JsonIgnore private String instanceName;
+
+ /** Upper bound for items returned. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Pagination token to go to the next page of Database Instance Roles. Requests first page if absent.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListDatabaseInstanceRolesRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ public ListDatabaseInstanceRolesRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListDatabaseInstanceRolesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseInstanceRolesRequest that = (ListDatabaseInstanceRolesRequest) o;
+ return Objects.equals(instanceName, that.instanceName)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(instanceName, pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseInstanceRolesRequest.class)
+ .add("instanceName", instanceName)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
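A paging sketch using the request fields above. The response accessors (`getDatabaseInstanceRoles()`, `getNextPageToken()`) are assumed from the usual generated naming for the fields of ListDatabaseInstanceRolesResponse; they are not visible in this excerpt.

```java
import com.databricks.sdk.service.database.DatabaseInstanceRole;
import com.databricks.sdk.service.database.DatabaseService;
import com.databricks.sdk.service.database.ListDatabaseInstanceRolesRequest;
import com.databricks.sdk.service.database.ListDatabaseInstanceRolesResponse;
import java.util.Collection;

public class ListRolesPagingSketch {
  static void printAllRoles(DatabaseService db, String instanceName) {
    String pageToken = null; // an absent token requests the first page
    do {
      ListDatabaseInstanceRolesResponse page =
          db.listDatabaseInstanceRoles(
              new ListDatabaseInstanceRolesRequest()
                  .setInstanceName(instanceName)
                  .setPageSize(100L)
                  .setPageToken(pageToken));
      Collection<DatabaseInstanceRole> roles = page.getDatabaseInstanceRoles(); // assumed accessor
      if (roles != null) {
        roles.forEach(System.out::println);
      }
      pageToken = page.getNextPageToken(); // assumed accessor for the next-page token
    } while (pageToken != null && !pageToken.isEmpty());
  }
}
```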
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesResponse.java
new file mode 100755
index 000000000..06b00a301
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseInstanceRolesResponse {
+ /** List of database instance roles. */
+ @JsonProperty("database_instance_roles")
+ private Collection<DatabaseInstanceRole> databaseInstanceRoles;
 * When creating a synced table in a registered Postgres catalog, the target Postgres database
* name is inferred to be that of the registered catalog. If this field is specified in this
* scenario, the Postgres database name MUST match that of the registered catalog (or the request
* will be rejected).
*
- * When creating a synced table in a standard catalog, the target database name is inferred to
- * be that of the standard catalog. In this scenario, specifying this field will allow targeting
- * an arbitrary postgres database.
+ * When creating a synced table in a standard catalog, this field is required. In this
+ * scenario, specifying this field will allow targeting an arbitrary postgres database. Note that
+ * this has implications for the `create_database_objects_if_missing` field in `spec`.
*/
@JsonProperty("logical_database_name")
private String logicalDatabaseName;
@@ -48,10 +47,6 @@ public class SyncedDatabaseTable {
@JsonProperty("spec")
private SyncedTableSpec spec;
- /** Data serving REST API URL for this table */
- @JsonProperty("table_serving_url")
- private String tableServingUrl;
-
/**
* The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
* state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
@@ -106,15 +101,6 @@ public SyncedTableSpec getSpec() {
return spec;
}
- public SyncedDatabaseTable setTableServingUrl(String tableServingUrl) {
- this.tableServingUrl = tableServingUrl;
- return this;
- }
-
- public String getTableServingUrl() {
- return tableServingUrl;
- }
-
public SyncedDatabaseTable setUnityCatalogProvisioningState(
ProvisioningInfoState unityCatalogProvisioningState) {
this.unityCatalogProvisioningState = unityCatalogProvisioningState;
@@ -135,7 +121,6 @@ public boolean equals(Object o) {
&& Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
&& Objects.equals(name, that.name)
&& Objects.equals(spec, that.spec)
- && Objects.equals(tableServingUrl, that.tableServingUrl)
&& Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState);
}
@@ -147,7 +132,6 @@ public int hashCode() {
logicalDatabaseName,
name,
spec,
- tableServingUrl,
unityCatalogProvisioningState);
}
@@ -159,7 +143,6 @@ public String toString() {
.add("logicalDatabaseName", logicalDatabaseName)
.add("name", name)
.add("spec", spec)
- .add("tableServingUrl", tableServingUrl)
.add("unityCatalogProvisioningState", unityCatalogProvisioningState)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java
index b8bd29ac7..b82574350 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java
@@ -17,16 +17,13 @@ public class SyncedTableContinuousUpdateStatus {
@JsonProperty("initial_pipeline_sync_progress")
private SyncedTablePipelineProgress initialPipelineSyncProgress;
- /**
- * The last source table Delta version that was synced to the synced table. Note that this Delta
- * version may not be completely synced to the synced table yet.
- */
+ /** The last source table Delta version that was successfully synced to the synced table. */
@JsonProperty("last_processed_commit_version")
private Long lastProcessedCommitVersion;
/**
- * The timestamp of the last time any data was synchronized from the source table to the synced
- * table.
+ * The end timestamp of the last time any data was synchronized from the source table to the
+ * synced table. This is when the data is available in the synced table.
*/
@JsonProperty("timestamp")
private String timestamp;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java
index e79f9da4f..ff3ccaf29 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java
@@ -14,16 +14,16 @@
@Generated
public class SyncedTableFailedStatus {
/**
- * The last source table Delta version that was synced to the synced table. Note that this Delta
- * version may only be partially synced to the synced table. Only populated if the table is still
- * synced and available for serving.
+ * The last source table Delta version that was successfully synced to the synced table. Only
+ * populated if the table is still synced and available for serving.
*/
@JsonProperty("last_processed_commit_version")
private Long lastProcessedCommitVersion;
/**
- * The timestamp of the last time any data was synchronized from the source table to the synced
- * table. Only populated if the table is still synced and available for serving.
+ * The end timestamp of the last time any data was synchronized from the source table to the
+ * synced table. Only populated if the table is still synced and available for serving.
*/
@JsonProperty("timestamp")
private String timestamp;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePosition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePosition.java
new file mode 100755
index 000000000..97402c518
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePosition.java
@@ -0,0 +1,81 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class SyncedTablePosition {
+ /** */
+ @JsonProperty("delta_table_sync_info")
+ private DeltaTableSyncInfo deltaTableSyncInfo;
+
+ /**
+ * The end timestamp of the most recent successful synchronization. This is the time when the data
+ * is available in the synced table.
+ */
+ @JsonProperty("sync_end_timestamp")
+ private String syncEndTimestamp;
+
+ /**
+ * The starting timestamp of the most recent successful synchronization from the source table to
+ * the destination (synced) table. Note this is the starting timestamp of the sync operation, not
+ * the end time. E.g., for a batch, this is the time when the sync operation started.
+ */
+ @JsonProperty("sync_start_timestamp")
+ private String syncStartTimestamp;
+
+ public SyncedTablePosition setDeltaTableSyncInfo(DeltaTableSyncInfo deltaTableSyncInfo) {
+ this.deltaTableSyncInfo = deltaTableSyncInfo;
+ return this;
+ }
+
+ public DeltaTableSyncInfo getDeltaTableSyncInfo() {
+ return deltaTableSyncInfo;
+ }
+
+ public SyncedTablePosition setSyncEndTimestamp(String syncEndTimestamp) {
+ this.syncEndTimestamp = syncEndTimestamp;
+ return this;
+ }
+
+ public String getSyncEndTimestamp() {
+ return syncEndTimestamp;
+ }
+
+ public SyncedTablePosition setSyncStartTimestamp(String syncStartTimestamp) {
+ this.syncStartTimestamp = syncStartTimestamp;
+ return this;
+ }
+
+ public String getSyncStartTimestamp() {
+ return syncStartTimestamp;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SyncedTablePosition that = (SyncedTablePosition) o;
+ return Objects.equals(deltaTableSyncInfo, that.deltaTableSyncInfo)
+ && Objects.equals(syncEndTimestamp, that.syncEndTimestamp)
+ && Objects.equals(syncStartTimestamp, that.syncStartTimestamp);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(deltaTableSyncInfo, syncEndTimestamp, syncStartTimestamp);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SyncedTablePosition.class)
+ .add("deltaTableSyncInfo", deltaTableSyncInfo)
+ .add("syncEndTimestamp", syncEndTimestamp)
+ .add("syncStartTimestamp", syncStartTimestamp)
+ .toString();
+ }
+}
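A small helper sketch showing how the position fields above fit together; it uses only the getters defined in SyncedTablePosition and DeltaTableSyncInfo.

```java
import com.databricks.sdk.service.database.DeltaTableSyncInfo;
import com.databricks.sdk.service.database.SyncedTablePosition;

public class SyncPositionSketch {
  static String describe(SyncedTablePosition position) {
    if (position == null) {
      return "no successful sync recorded yet";
    }
    DeltaTableSyncInfo delta = position.getDeltaTableSyncInfo();
    String version = (delta == null) ? "unknown" : String.valueOf(delta.getDeltaCommitVersion());
    // sync_start_timestamp is when the sync operation began; sync_end_timestamp is when
    // the data became available in the synced table.
    return "Delta commit " + version
        + " (sync started " + position.getSyncStartTimestamp()
        + ", data available since " + position.getSyncEndTimestamp() + ")";
  }
}
```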
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java
index c454a0d3c..7df8aab22 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java
@@ -18,14 +18,19 @@ public class SyncedTableSpec {
@JsonProperty("create_database_objects_if_missing")
private Boolean createDatabaseObjectsIfMissing;
- /** Spec of new pipeline. Should be empty if pipeline_id is set */
+ /**
+ * User-specified ID of a pre-existing pipeline to bin pack. This field is optional, and should be
+ * empty if new_pipeline_spec is set. This field will only be set by the server in response
+ * messages if it is specified in the request. The SyncedTableStatus message will always contain
+ * the effective pipeline ID (either client provided or server generated), however.
+ */
+ @JsonProperty("existing_pipeline_id")
+ private String existingPipelineId;
+
+ /** Spec of new pipeline. Should be empty if pipeline_id / existing_pipeline_id is set */
@JsonProperty("new_pipeline_spec")
private NewPipelineSpec newPipelineSpec;
- /** ID of the associated pipeline. Should be empty if new_pipeline_spec is set */
- @JsonProperty("pipeline_id")
- private String pipelineId;
-
/** Primary Key columns to be used for data insert/update in the destination. */
@JsonProperty("primary_key_columns")
private Collection<String> primaryKeyColumns;
+ * Will always be present if there has been a successful sync. Even if the most recent syncs
+ * have failed.
+ *
+ * Limitation: if the synced table is undergoing a FULL REFRESH, the last sync information will
+ * not be available until the full refresh is complete. This limitation will be addressed in a
+ * future version.
+ *
+ * This top-level field is a convenience for consumers who want easy access to last sync
+ * information without having to traverse detailed_status.
+ */
+ @JsonProperty("last_sync")
+ private SyncedTablePosition lastSync;
+
/** A text description of the current state of the synced table. */
@JsonProperty("message")
private String message;
+ /**
+ * ID of the associated pipeline. The pipeline ID may have been provided by the client (in the
+ * case of bin packing), or generated by the server (when creating a new pipeline).
+ */
+ @JsonProperty("pipeline_id")
+ private String pipelineId;
+
/**
* Detailed status of a synced table. Shown if the synced table is in the
* PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.
@@ -74,6 +97,15 @@ public SyncedTableFailedStatus getFailedStatus() {
return failedStatus;
}
+ public SyncedTableStatus setLastSync(SyncedTablePosition lastSync) {
+ this.lastSync = lastSync;
+ return this;
+ }
+
+ public SyncedTablePosition getLastSync() {
+ return lastSync;
+ }
+
public SyncedTableStatus setMessage(String message) {
this.message = message;
return this;
@@ -83,6 +115,15 @@ public String getMessage() {
return message;
}
+ public SyncedTableStatus setPipelineId(String pipelineId) {
+ this.pipelineId = pipelineId;
+ return this;
+ }
+
+ public String getPipelineId() {
+ return pipelineId;
+ }
+
public SyncedTableStatus setProvisioningStatus(SyncedTableProvisioningStatus provisioningStatus) {
this.provisioningStatus = provisioningStatus;
return this;
@@ -110,7 +151,9 @@ public boolean equals(Object o) {
return Objects.equals(continuousUpdateStatus, that.continuousUpdateStatus)
&& Objects.equals(detailedState, that.detailedState)
&& Objects.equals(failedStatus, that.failedStatus)
+ && Objects.equals(lastSync, that.lastSync)
&& Objects.equals(message, that.message)
+ && Objects.equals(pipelineId, that.pipelineId)
&& Objects.equals(provisioningStatus, that.provisioningStatus)
&& Objects.equals(triggeredUpdateStatus, that.triggeredUpdateStatus);
}
@@ -121,7 +164,9 @@ public int hashCode() {
continuousUpdateStatus,
detailedState,
failedStatus,
+ lastSync,
message,
+ pipelineId,
provisioningStatus,
triggeredUpdateStatus);
}
@@ -132,7 +177,9 @@ public String toString() {
.add("continuousUpdateStatus", continuousUpdateStatus)
.add("detailedState", detailedState)
.add("failedStatus", failedStatus)
+ .add("lastSync", lastSync)
.add("message", message)
+ .add("pipelineId", pipelineId)
.add("provisioningStatus", provisioningStatus)
.add("triggeredUpdateStatus", triggeredUpdateStatus)
.toString();
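A consumption sketch for the two fields added to SyncedTableStatus above. Obtaining the status (e.g. from a SyncedDatabaseTable) is outside this excerpt; only `getLastSync()` and `getPipelineId()` come from this diff.

```java
import com.databricks.sdk.service.database.SyncedTablePosition;
import com.databricks.sdk.service.database.SyncedTableStatus;

public class SyncedTableStatusSketch {
  static void report(SyncedTableStatus status) {
    // pipeline_id is always the effective pipeline, whether supplied by the client
    // (bin packing via existing_pipeline_id) or generated by the server.
    System.out.println("pipeline: " + status.getPipelineId());

    // last_sync is a convenience copy of the most recent successful sync position; per the
    // doc comment it stays populated even if later syncs fail (except during a FULL REFRESH).
    SyncedTablePosition lastSync = status.getLastSync();
    if (lastSync == null) {
      System.out.println("no successful sync yet");
    } else {
      System.out.println("last successful sync ended at " + lastSync.getSyncEndTimestamp());
    }
  }
}
```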
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java
index 756e597ec..246810286 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java
@@ -13,16 +13,13 @@
*/
@Generated
public class SyncedTableTriggeredUpdateStatus {
- /**
- * The last source table Delta version that was synced to the synced table. Note that this Delta
- * version may not be completely synced to the synced table yet.
- */
+ /** The last source table Delta version that was successfully synced to the synced table. */
@JsonProperty("last_processed_commit_version")
private Long lastProcessedCommitVersion;
/**
- * The timestamp of the last time any data was synchronized from the source table to the synced
- * table.
+ * The end timestamp of the last time any data was synchronized from the source table to the
+ * synced table. This is when the data is available in the synced table.
*/
@JsonProperty("timestamp")
private String timestamp;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java
index fe0ed445c..9ce802b5b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java
@@ -9,7 +9,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Update a Database Instance */
@Generated
public class UpdateDatabaseInstanceRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java
index 6b8708368..9fb2ebc26 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Create a directory */
@Generated
public class CreateDirectoryRequest {
/** The absolute path of a directory. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
index f1909f28f..f0e7e51e4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
@@ -32,9 +32,7 @@ public void addBlock(long handle, String data) {
}
/**
- * Append data block.
- *
- * Appends a block of data to the stream specified by the input handle. If the handle does not
+ * Appends a block of data to the stream specified by the input handle. If the handle does not
* exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``.
*
* If the block of data exceeds 1 MB, this call will throw an exception with
@@ -49,10 +47,8 @@ public void close(long handle) {
}
/**
- * Close the stream.
- *
- * Closes the stream specified by the input handle. If the handle does not exist, this call
- * throws an exception with ``RESOURCE_DOES_NOT_EXIST``.
+ * Closes the stream specified by the input handle. If the handle does not exist, this call throws
+ * an exception with ``RESOURCE_DOES_NOT_EXIST``.
*/
public void close(Close request) {
impl.close(request);
@@ -63,9 +59,7 @@ public CreateResponse create(String path) {
}
/**
- * Open a stream.
- *
- * Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute
+ * Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute
* idle timeout on this handle. If a file or directory already exists on the given path and
* __overwrite__ is set to false, this call will throw an exception with
* ``RESOURCE_ALREADY_EXISTS``.
@@ -84,11 +78,9 @@ public void delete(String path) {
}
/**
- * Delete a file/directory.
- *
- * Delete the file or directory (optionally recursively delete all files in the directory).
- * This call throws an exception with `IO_ERROR` if the path is a non-empty directory and
- * `recursive` is set to `false` or on other similar errors.
+ * Delete the file or directory (optionally recursively delete all files in the directory). This
+ * call throws an exception with `IO_ERROR` if the path is a non-empty directory and `recursive`
+ * is set to `false` or on other similar errors.
*
* When you delete a large number of files, the delete operation is done in increments. The
* call returns a response after approximately 45 seconds with an error message (503 Service
@@ -111,9 +103,7 @@ public FileInfo getStatus(String path) {
}
/**
- * Get the information of a file or directory.
- *
- * Gets the file information for a file or directory. If the file or directory does not exist,
+ * Gets the file information for a file or directory. If the file or directory does not exist,
* this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
*/
public FileInfo getStatus(GetStatusRequest request) {
@@ -125,9 +115,7 @@ public Iterable<FileInfo> list(String path) {
}
/**
- * List directory contents or file details.
- *
- * List the contents of a directory, or details of the file. If the file or directory does not
+ * List the contents of a directory, or details of the file. If the file or directory does not
* exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
*
* When calling list on a large directory, the list operation will time out after approximately
@@ -146,9 +134,7 @@ public void mkdirs(String path) {
}
/**
- * Create a directory.
- *
- * Creates the given directory and necessary parent directories if they do not exist. If a file
+ * Creates the given directory and necessary parent directories if they do not exist. If a file
* (not a directory) exists at any prefix of the input path, this call throws an exception with
* `RESOURCE_ALREADY_EXISTS`. **Note**: If this operation fails, it might have succeeded in
* creating some of the necessary parent directories.
@@ -162,9 +148,7 @@ public void move(String sourcePath, String destinationPath) {
}
/**
- * Move a file.
- *
- * Moves a file from one location to another location within DBFS. If the source file does not
+ * Moves a file from one location to another location within DBFS. If the source file does not
* exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists
* in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the
* given source path is a directory, this call always recursively moves all files.
@@ -178,10 +162,8 @@ public void put(String path) {
}
/**
- * Upload a file.
- *
- * Uploads a file through the use of multipart form post. It is mainly used for streaming
- * uploads, but can also be used as a convenient single call for data upload.
+ * Uploads a file through the use of multipart form post. It is mainly used for streaming uploads,
+ * but can also be used as a convenient single call for data upload.
*
* Alternatively you can pass contents as base64 string.
*
@@ -200,12 +182,10 @@ public ReadResponse read(String path) {
}
/**
- * Get the contents of a file.
- *
- * Returns the contents of a file. If the file does not exist, this call throws an exception
- * with `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if
- * the offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the
- * read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
+ * Returns the contents of a file. If the file does not exist, this call throws an exception with
+ * `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the
+ * offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read
+ * length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
*
* If `offset + length` exceeds the number of bytes in a file, it reads the contents until the
* end of file.
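A sketch of the create/addBlock/close streaming sequence described above, against the convenience overloads visible in this diff (`create(String)`, `addBlock(long, String)`, `close(long)`). The `getHandle()` accessor on CreateResponse and the base64 encoding of the block payload are assumptions, not shown in this excerpt; the target path is a placeholder.

```java
import com.databricks.sdk.service.files.CreateResponse;
import com.databricks.sdk.service.files.DbfsAPI;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class DbfsStreamingUploadSketch {
  // The DbfsAPI instance would normally come from the SDK's WorkspaceClient.
  static void uploadSmallFile(DbfsAPI dbfs, String contents) {
    // Open a stream; the returned handle has a 10 minute idle timeout.
    CreateResponse created = dbfs.create("/tmp/example.txt"); // placeholder path
    long handle = created.getHandle(); // assumed accessor on CreateResponse

    // Append one block; blocks larger than 1 MB are rejected. The payload is assumed
    // to be base64-encoded, as with the "contents as base64 string" note for put.
    dbfs.addBlock(
        handle, Base64.getEncoder().encodeToString(contents.getBytes(StandardCharsets.UTF_8)));

    // Close the stream; the handle cannot be used afterwards.
    dbfs.close(handle);
  }
}
```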
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java
index 007d6eba3..f2457b67f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java
@@ -14,9 +14,7 @@
@Generated
public interface DbfsService {
/**
- * Append data block.
- *
- * Appends a block of data to the stream specified by the input handle. If the handle does not
+ * Appends a block of data to the stream specified by the input handle. If the handle does not
* exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``.
*
* If the block of data exceeds 1 MB, this call will throw an exception with
@@ -25,17 +23,13 @@ public interface DbfsService {
void addBlock(AddBlock addBlock);
/**
- * Close the stream.
- *
- * Closes the stream specified by the input handle. If the handle does not exist, this call
- * throws an exception with ``RESOURCE_DOES_NOT_EXIST``.
+ * Closes the stream specified by the input handle. If the handle does not exist, this call throws
+ * an exception with ``RESOURCE_DOES_NOT_EXIST``.
*/
void close(Close close);
/**
- * Open a stream.
- *
- * Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute
+ * Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute
* idle timeout on this handle. If a file or directory already exists on the given path and
* __overwrite__ is set to false, this call will throw an exception with
* ``RESOURCE_ALREADY_EXISTS``.
@@ -48,11 +42,9 @@ public interface DbfsService {
CreateResponse create(Create create);
/**
- * Delete a file/directory.
- *
- * Delete the file or directory (optionally recursively delete all files in the directory).
- * This call throws an exception with `IO_ERROR` if the path is a non-empty directory and
- * `recursive` is set to `false` or on other similar errors.
+ * Delete the file or directory (optionally recursively delete all files in the directory). This
+ * call throws an exception with `IO_ERROR` if the path is a non-empty directory and `recursive`
+ * is set to `false` or on other similar errors.
*
* When you delete a large number of files, the delete operation is done in increments. The
* call returns a response after approximately 45 seconds with an error message (503 Service
@@ -69,17 +61,13 @@ public interface DbfsService {
void delete(Delete delete);
/**
- * Get the information of a file or directory.
- *
- * Gets the file information for a file or directory. If the file or directory does not exist,
+ * Gets the file information for a file or directory. If the file or directory does not exist,
* this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
*/
FileInfo getStatus(GetStatusRequest getStatusRequest);
/**
- * List directory contents or file details.
- *
- * List the contents of a directory, or details of the file. If the file or directory does not
+ * List the contents of a directory, or details of the file. If the file or directory does not
* exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
*
* When calling list on a large directory, the list operation will time out after approximately
@@ -92,9 +80,7 @@ public interface DbfsService {
ListStatusResponse list(ListDbfsRequest listDbfsRequest);
/**
- * Create a directory.
- *
- * Creates the given directory and necessary parent directories if they do not exist. If a file
+ * Creates the given directory and necessary parent directories if they do not exist. If a file
* (not a directory) exists at any prefix of the input path, this call throws an exception with
* `RESOURCE_ALREADY_EXISTS`. **Note**: If this operation fails, it might have succeeded in
* creating some of the necessary parent directories.
@@ -102,9 +88,7 @@ public interface DbfsService {
void mkdirs(MkDirs mkDirs);
/**
- * Move a file.
- *
- * Moves a file from one location to another location within DBFS. If the source file does not
+ * Moves a file from one location to another location within DBFS. If the source file does not
* exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists
* in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the
* given source path is a directory, this call always recursively moves all files.
@@ -112,10 +96,8 @@ public interface DbfsService {
void move(Move move);
/**
- * Upload a file.
- *
- * Uploads a file through the use of multipart form post. It is mainly used for streaming
- * uploads, but can also be used as a convenient single call for data upload.
+ * Uploads a file through the use of multipart form post. It is mainly used for streaming uploads,
+ * but can also be used as a convenient single call for data upload.
*
* Alternatively you can pass contents as base64 string.
*
@@ -128,12 +110,10 @@ public interface DbfsService {
void put(Put put);
/**
- * Get the contents of a file.
- *
- * Returns the contents of a file. If the file does not exist, this call throws an exception
- * with `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if
- * the offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the
- * read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
+ * Returns the contents of a file. If the file does not exist, this call throws an exception with
+ * `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the
+ * offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read
+ * length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
*
* If `offset + length` exceeds the number of bytes in a file, it reads the contents until the
* end of file.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java
index 462b1a8a4..db3e38e17 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a directory */
@Generated
public class DeleteDirectoryRequest {
/** The absolute path of a directory. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java
index 14fb11c3e..04b79d076 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Delete a file */
@Generated
public class DeleteFileRequest {
/** The absolute path of the file. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java
index 91e5cefa5..cebd8307a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Download a file */
@Generated
public class DownloadRequest {
/** The absolute path of the file. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
index 7bfecc4ca..ee87aebe4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
@@ -51,12 +51,9 @@ public void createDirectory(String directoryPath) {
}
/**
- * Create a directory.
- *
- * Creates an empty directory. If necessary, also creates any parent directories of the new,
- * empty directory (like the shell command `mkdir -p`). If called on an existing directory,
- * returns a success response; this method is idempotent (it will succeed if the directory already
- * exists).
+ * Creates an empty directory. If necessary, also creates any parent directories of the new, empty
+ * directory (like the shell command `mkdir -p`). If called on an existing directory, returns a
+ * success response; this method is idempotent (it will succeed if the directory already exists).
*/
public void createDirectory(CreateDirectoryRequest request) {
impl.createDirectory(request);
@@ -66,11 +63,7 @@ public void delete(String filePath) {
delete(new DeleteFileRequest().setFilePath(filePath));
}
- /**
- * Delete a file.
- *
- * Deletes a file. If the request is successful, there is no response body.
- */
+ /** Deletes a file. If the request is successful, there is no response body. */
public void delete(DeleteFileRequest request) {
impl.delete(request);
}
@@ -80,9 +73,7 @@ public void deleteDirectory(String directoryPath) {
}
/**
- * Delete a directory.
- *
- * Deletes an empty directory.
+ * Deletes an empty directory.
*
* To delete a non-empty directory, first delete all of its contents. This can be done by
* listing the directory contents and deleting each file and subdirectory recursively.
@@ -96,9 +87,7 @@ public DownloadResponse download(String filePath) {
}
/**
- * Download a file.
- *
- * Downloads a file. The file contents are the response body. This is a standard HTTP file
+ * Downloads a file. The file contents are the response body. This is a standard HTTP file
* download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
*/
public DownloadResponse download(DownloadRequest request) {
@@ -110,9 +99,7 @@ public void getDirectoryMetadata(String directoryPath) {
}
/**
- * Get directory metadata.
- *
- * Get the metadata of a directory. The response HTTP headers contain the metadata. There is no
+ * Get the metadata of a directory. The response HTTP headers contain the metadata. There is no
* response body.
*
* This method is useful to check if a directory exists and the caller has access to it.
@@ -130,9 +117,7 @@ public GetMetadataResponse getMetadata(String filePath) {
}
/**
- * Get file metadata.
- *
- * Get the metadata of a file. The response HTTP headers contain the metadata. There is no
+ * Get the metadata of a file. The response HTTP headers contain the metadata. There is no
* response body.
*/
public GetMetadataResponse getMetadata(GetMetadataRequest request) {
@@ -145,9 +130,7 @@ public Iterable<DirectoryEntry> listDirectoryContents(String directoryPath) {
}
/**
- * List directory contents.
- *
- * Returns the contents of a directory. If there is no directory at the specified path, the API
+ * Returns the contents of a directory. If there is no directory at the specified path, the API
* returns a HTTP 404 error.
*/
public Iterable<DirectoryEntry> listDirectoryContents(ListDirectoryContentsRequest request) {
- * Upload a file.
- *
- * Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw
+ * Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw
* bytes (an octet stream); do not encode or otherwise modify the bytes before sending. The
* contents of the resulting file will be exactly the bytes sent in the request body. If the
* request is successful, there is no response body.
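A sketch exercising the directory operations documented above, using only method signatures visible in this diff (`createDirectory(String)`, `getDirectoryMetadata(String)`, `deleteDirectory(String)`). The volume path is a placeholder; the FilesAPI instance would normally come from the SDK's WorkspaceClient.

```java
import com.databricks.sdk.service.files.FilesAPI;

public class FilesDirectorySketch {
  static void roundTrip(FilesAPI files) {
    String dir = "/Volumes/main/default/my_volume/reports"; // placeholder volume path

    // Idempotent, like `mkdir -p`: succeeds even if the directory already exists.
    files.createDirectory(dir);

    // Metadata is returned as HTTP headers; the call simply succeeds if the directory
    // exists and the caller has access to it.
    files.getDirectoryMetadata(dir);

    // Directories must be empty before deletion.
    files.deleteDirectory(dir);
  }
}
```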
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
index 791175943..e2d7724f6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
@@ -32,26 +32,17 @@
@Generated
public interface FilesService {
/**
- * Create a directory.
- *
- * Creates an empty directory. If necessary, also creates any parent directories of the new,
- * empty directory (like the shell command `mkdir -p`). If called on an existing directory,
- * returns a success response; this method is idempotent (it will succeed if the directory already
- * exists).
+ * Creates an empty directory. If necessary, also creates any parent directories of the new, empty
+ * directory (like the shell command `mkdir -p`). If called on an existing directory, returns a
+ * success response; this method is idempotent (it will succeed if the directory already exists).
*/
void createDirectory(CreateDirectoryRequest createDirectoryRequest);
- /**
- * Delete a file.
- *
- * Deletes a file. If the request is successful, there is no response body.
- */
+ /** Deletes a file. If the request is successful, there is no response body. */
void delete(DeleteFileRequest deleteFileRequest);
/**
- * Delete a directory.
- *
- * Deletes an empty directory.
+ * Deletes an empty directory.
*
* To delete a non-empty directory, first delete all of its contents. This can be done by
* listing the directory contents and deleting each file and subdirectory recursively.
@@ -59,17 +50,13 @@ public interface FilesService {
void deleteDirectory(DeleteDirectoryRequest deleteDirectoryRequest);
/**
- * Download a file.
- *
- * Downloads a file. The file contents are the response body. This is a standard HTTP file
+ * Downloads a file. The file contents are the response body. This is a standard HTTP file
* download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
*/
DownloadResponse download(DownloadRequest downloadRequest);
/**
- * Get directory metadata.
- *
- * Get the metadata of a directory. The response HTTP headers contain the metadata. There is no
+ * Get the metadata of a directory. The response HTTP headers contain the metadata. There is no
* response body.
*
* This method is useful to check if a directory exists and the caller has access to it.
@@ -81,26 +68,20 @@ public interface FilesService {
void getDirectoryMetadata(GetDirectoryMetadataRequest getDirectoryMetadataRequest);
/**
- * Get file metadata.
- *
- * Get the metadata of a file. The response HTTP headers contain the metadata. There is no
+ * Get the metadata of a file. The response HTTP headers contain the metadata. There is no
* response body.
*/
GetMetadataResponse getMetadata(GetMetadataRequest getMetadataRequest);
/**
- * List directory contents.
- *
- * Returns the contents of a directory. If there is no directory at the specified path, the API
+ * Returns the contents of a directory. If there is no directory at the specified path, the API
* returns a HTTP 404 error.
*/
ListDirectoryResponse listDirectoryContents(
ListDirectoryContentsRequest listDirectoryContentsRequest);
/**
- * Upload a file.
- *
- * Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw
+ * Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw
* bytes (an octet stream); do not encode or otherwise modify the bytes before sending. The
* contents of the resulting file will be exactly the bytes sent in the request body. If the
* request is successful, there is no response body.
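The delete-directory documentation above tells callers to remove a non-empty directory by listing its contents and deleting each entry recursively, but leaves the loop to the reader. A hedged sketch of one such helper, assuming DirectoryEntry exposes getPath() and getIsDirectory() accessors (names assumed) and using the listDirectoryContents(String), delete(String), and deleteDirectory(String) convenience overloads shown earlier in this diff.
// Sketch only: DirectoryEntry accessor names are assumptions.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.files.DirectoryEntry;

public class RecursiveDeleteSketch {
  /** Deletes every file and subdirectory under {@code path}, then the directory itself. */
  static void deleteRecursively(WorkspaceClient w, String path) {
    for (DirectoryEntry entry : w.files().listDirectoryContents(path)) {
      if (Boolean.TRUE.equals(entry.getIsDirectory())) {
        deleteRecursively(w, entry.getPath()); // empty out subdirectories first
      } else {
        w.files().delete(entry.getPath()); // plain files can be deleted directly
      }
    }
    w.files().deleteDirectory(path); // the directory is empty now, so this call succeeds
  }

  public static void main(String[] args) {
    deleteRecursively(new WorkspaceClient(), "/Volumes/main/default/my_volume/tmp");
  }
}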
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java
index 7225379c4..2784a7136 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get directory metadata */
@Generated
public class GetDirectoryMetadataRequest {
/** The absolute path of a directory. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java
index 05693c633..e6f512571 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get file metadata */
@Generated
public class GetMetadataRequest {
/** The absolute path of the file. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java
index f5091a01c..ad0ab1056 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get the information of a file or directory */
@Generated
public class GetStatusRequest {
/** The path of the file or directory. The path should be the absolute DBFS path. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequest.java
index 5b66be4cc..ec8d127f4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDbfsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List directory contents or file details */
@Generated
public class ListDbfsRequest {
/** The path of the file or directory. The path should be the absolute DBFS path. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequest.java
index 684dde699..842f6c511 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryContentsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** List directory contents */
@Generated
public class ListDirectoryContentsRequest {
/** The absolute path of a directory. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java
index 945e0c78d..845c0de43 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java
@@ -8,7 +8,6 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get the contents of a file */
@Generated
public class ReadDbfsRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java
index a6c715147..ae27966cc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java
@@ -9,7 +9,6 @@
import java.io.InputStream;
import java.util.Objects;
-/** Upload a file */
@Generated
public class UploadRequest {
/** */
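The directory-metadata documentation earlier in this diff calls the endpoint out as a way to check that a directory exists and that the caller can access it. A small sketch of that check, assuming the SDK surfaces the 404 response as com.databricks.sdk.core.error.platform.NotFound (an assumption not confirmed by this diff).
// Sketch only: the NotFound exception mapping is an assumption.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.core.error.platform.NotFound;

public class DirectoryExistsSketch {
  /** Returns true if the directory exists and the caller has access to it. */
  static boolean directoryExists(WorkspaceClient w, String path) {
    try {
      w.files().getDirectoryMetadata(path); // metadata comes back in HTTP headers; no body
      return true;
    } catch (NotFound e) { // assumed exception type for the 404 case
      return false;
    }
  }

  public static void main(String[] args) {
    System.out.println(directoryExists(new WorkspaceClient(), "/Volumes/main/default/my_volume"));
  }
}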
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlAPI.java
index cfa50b192..24c609428 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlAPI.java
@@ -33,10 +33,8 @@ public GetAssignableRolesForResourceResponse getAssignableRolesForResource(Strin
}
/**
- * Get assignable roles for a resource.
- *
- * Gets all the roles that can be granted on an account level resource. A role is grantable if
- * the rule set on the resource can contain an access rule of the role.
+ * Gets all the roles that can be granted on an account level resource. A role is grantable if the
+ * rule set on the resource can contain an access rule of the role.
*/
public GetAssignableRolesForResourceResponse getAssignableRolesForResource(
GetAssignableRolesForResourceRequest request) {
@@ -48,10 +46,8 @@ public RuleSetResponse getRuleSet(String name, String etag) {
}
/**
- * Get a rule set.
- *
- * Get a rule set by its name. A rule set is always attached to a resource and contains a list
- * of access rules on the said resource. Currently only a default rule set for each resource is
+ * Get a rule set by its name. A rule set is always attached to a resource and contains a list of
+ * access rules on the said resource. Currently only a default rule set for each resource is
* supported.
*/
public RuleSetResponse getRuleSet(GetRuleSetRequest request) {
@@ -63,9 +59,7 @@ public RuleSetResponse updateRuleSet(String name, RuleSetUpdateRequest ruleSet)
}
/**
- * Update a rule set.
- *
- * Replace the rules of a rule set. First, use get to read the current version of the rule set
+ * Replace the rules of a rule set. First, use get to read the current version of the rule set
* before modifying it. This pattern helps prevent conflicts between concurrent updates.
*/
public RuleSetResponse updateRuleSet(UpdateRuleSetRequest request) {
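The rule-set documentation above prescribes a read-then-update flow to avoid clobbering concurrent changes. A minimal sketch of that pattern against AccountAccessControlAPI, assuming the a.accessControl() accessor, the RuleSetResponse getters getEtag()/getGrantRules(), and the RuleSetUpdateRequest setters are named as guessed below; only the getRuleSet(name, etag) and updateRuleSet(name, ruleSet) shapes appear in this diff.
// Sketch only: accessor, getter, and setter names below are assumptions.
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.iam.RuleSetResponse;
import com.databricks.sdk.service.iam.RuleSetUpdateRequest;

public class RuleSetUpdateSketch {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();
    String name = "accounts/ACCOUNT_ID/ruleSets/default"; // hypothetical rule-set name
    // Read the current rule set first; the returned etag identifies the version being edited.
    RuleSetResponse current = a.accessControl().getRuleSet(name, "");
    // Write the (possibly modified) rules back together with that etag, so a concurrent
    // update is detected instead of being silently overwritten.
    a.accessControl()
        .updateRuleSet(
            name,
            new RuleSetUpdateRequest()
                .setName(name)
                .setEtag(current.getEtag())
                .setGrantRules(current.getGrantRules()));
  }
}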
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java
index 6d95a309b..af1e94ffe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyAPI.java
@@ -34,10 +34,8 @@ public GetAssignableRolesForResourceResponse getAssignableRolesForResource(Strin
}
/**
- * Get assignable roles for a resource.
- *
- * Gets all the roles that can be granted on an account level resource. A role is grantable if
- * the rule set on the resource can contain an access rule of the role.
+ * Gets all the roles that can be granted on an account level resource. A role is grantable if the
+ * rule set on the resource can contain an access rule of the role.
*/
public GetAssignableRolesForResourceResponse getAssignableRolesForResource(
GetAssignableRolesForResourceRequest request) {
@@ -49,10 +47,8 @@ public RuleSetResponse getRuleSet(String name, String etag) {
}
/**
- * Get a rule set.
- *
- * Get a rule set by its name. A rule set is always attached to a resource and contains a list
- * of access rules on the said resource. Currently only a default rule set for each resource is
+ * Get a rule set by its name. A rule set is always attached to a resource and contains a list of
+ * access rules on the said resource. Currently only a default rule set for each resource is
* supported.
*/
public RuleSetResponse getRuleSet(GetRuleSetRequest request) {
@@ -64,9 +60,7 @@ public RuleSetResponse updateRuleSet(String name, RuleSetUpdateRequest ruleSet)
}
/**
- * Update a rule set.
- *
- * Replace the rules of a rule set. First, use get to read the current version of the rule set
+ * Replace the rules of a rule set. First, use get to read the current version of the rule set
* before modifying it. This pattern helps prevent conflicts between concurrent updates.
*/
public RuleSetResponse updateRuleSet(UpdateRuleSetRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyService.java
index 3de92d68e..71f1d1145 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyService.java
@@ -16,27 +16,21 @@
@Generated
public interface AccountAccessControlProxyService {
/**
- * Get assignable roles for a resource.
- *
- * Gets all the roles that can be granted on an account level resource. A role is grantable if
- * the rule set on the resource can contain an access rule of the role.
+ * Gets all the roles that can be granted on an account level resource. A role is grantable if the
+ * rule set on the resource can contain an access rule of the role.
*/
GetAssignableRolesForResourceResponse getAssignableRolesForResource(
GetAssignableRolesForResourceRequest getAssignableRolesForResourceRequest);
/**
- * Get a rule set.
- *
- * Get a rule set by its name. A rule set is always attached to a resource and contains a list
- * of access rules on the said resource. Currently only a default rule set for each resource is
+ * Get a rule set by its name. A rule set is always attached to a resource and contains a list of
+ * access rules on the said resource. Currently only a default rule set for each resource is
* supported.
*/
RuleSetResponse getRuleSet(GetRuleSetRequest getRuleSetRequest);
/**
- * Update a rule set.
- *
- * Replace the rules of a rule set. First, use get to read the current version of the rule set
+ * Replace the rules of a rule set. First, use get to read the current version of the rule set
* before modifying it. This pattern helps prevent conflicts between concurrent updates.
*/
RuleSetResponse updateRuleSet(UpdateRuleSetRequest updateRuleSetRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlService.java
index 3b5f6f0cb..b7825e379 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlService.java
@@ -15,27 +15,21 @@
@Generated
public interface AccountAccessControlService {
/**
- * Get assignable roles for a resource.
- *
- * Gets all the roles that can be granted on an account level resource. A role is grantable if
- * the rule set on the resource can contain an access rule of the role.
+ * Gets all the roles that can be granted on an account level resource. A role is grantable if the
+ * rule set on the resource can contain an access rule of the role.
*/
GetAssignableRolesForResourceResponse getAssignableRolesForResource(
GetAssignableRolesForResourceRequest getAssignableRolesForResourceRequest);
/**
- * Get a rule set.
- *
- * Get a rule set by its name. A rule set is always attached to a resource and contains a list
- * of access rules on the said resource. Currently only a default rule set for each resource is
+ * Get a rule set by its name. A rule set is always attached to a resource and contains a list of
+ * access rules on the said resource. Currently only a default rule set for each resource is
* supported.
*/
RuleSetResponse getRuleSet(GetRuleSetRequest getRuleSetRequest);
/**
- * Update a rule set.
- *
- * Replace the rules of a rule set. First, use get to read the current version of the rule set
+ * Replace the rules of a rule set. First, use get to read the current version of the rule set
* before modifying it. This pattern helps prevent conflicts between concurrent updates.
*/
RuleSetResponse updateRuleSet(UpdateRuleSetRequest updateRuleSetRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
index 7016a0673..4e0bbc9e7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
@@ -32,10 +32,7 @@ public AccountGroupsAPI(AccountGroupsService mock) {
}
/**
- * Create a new group.
- *
- * Creates a group in the Databricks account with a unique name, using the supplied group
- * details.
+ * Creates a group in the Databricks account with a unique name, using the supplied group details.
*/
public Group create(Group request) {
return impl.create(request);
@@ -45,11 +42,7 @@ public void delete(String id) {
delete(new DeleteAccountGroupRequest().setId(id));
}
- /**
- * Delete a group.
- *
- * Deletes a group from the Databricks account.
- */
+ /** Deletes a group from the Databricks account. */
public void delete(DeleteAccountGroupRequest request) {
impl.delete(request);
}
@@ -58,20 +51,12 @@ public Group get(String id) {
return get(new GetAccountGroupRequest().setId(id));
}
- /**
- * Get group details.
- *
- * Gets the information for a specific group in the Databricks account.
- */
+ /** Gets the information for a specific group in the Databricks account. */
public Group get(GetAccountGroupRequest request) {
return impl.get(request);
}
- /**
- * List group details.
- *
- * Gets all details of the groups associated with the Databricks account.
- */
+ /** Gets all details of the groups associated with the Databricks account. */
public Iterable