diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 200f9513f..e5aff5d67 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -633dafff1aa6f0198a576cf83bfa81b2b4f27d46 \ No newline at end of file +a0bc51d001ca139a81dd6d192ae12394a3ca0834 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 06981143e..b9ccbe751 100755 --- a/.gitattributes +++ b/.gitattributes @@ -117,7 +117,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudge databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java linguist-generated=true @@ -145,7 +144,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDelivery databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpecField.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudget.java linguist-generated=true @@ -184,7 +182,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAll databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java linguist-generated=true @@ -198,7 +195,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueS databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogIsolationMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java linguist-generated=true @@ -208,6 +204,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsSer databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMask.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnRelationship.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java linguist-generated=true @@ -219,7 +216,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousU databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLineageRelationshipRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalMetadataRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequest.java linguist-generated=true @@ -231,7 +230,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetas databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true @@ -252,19 +251,21 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccou databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true @@ -275,7 +276,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharin databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagInheritedFromType.java linguist-generated=true @@ -283,13 +283,31 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePr databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadata.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadataInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageFileInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersion.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersionInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageObject.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineagePath.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationship.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationshipInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTableInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadata.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java linguist-generated=true @@ -325,6 +343,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnecti databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalMetadataRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java linguist-generated=true @@ -346,6 +365,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LineageDirection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java linguist-generated=true @@ -356,8 +376,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnect databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java 
linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java linguist-generated=true @@ -466,6 +490,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchem databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java linguist-generated=true @@ -487,13 +512,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTa databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLineageRelationshipRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalMetadataRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java linguist-generated=true @@ -502,7 +527,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonit databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java linguist-generated=true @@ -566,7 +591,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCl databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java linguist-generated=true @@ -580,7 +604,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListClea databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScale.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java linguist-generated=true @@ -588,9 +611,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAvailabi 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAvailability.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommand.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java linguist-generated=true @@ -651,15 +672,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEv databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContext.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeAzureDiskVolumeType.java linguist-generated=true @@ -668,12 +684,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasic databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EbsVolumeType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java 
linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java linguist-generated=true @@ -716,7 +728,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptE databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibraries.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java linguist-generated=true @@ -776,9 +787,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanc databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersImpl.java linguist-generated=true @@ -790,11 +799,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamil databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResultType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java linguist-generated=true @@ -803,19 +809,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkVersion.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/State.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReason.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibraries.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true @@ -893,10 +894,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrip databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashSpaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java linguist-generated=true @@ -941,6 +940,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDataba databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaims.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaimsPermissionSet.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedResource.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java linguist-generated=true @@ -954,21 +956,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTabl databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Delete.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntry.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java linguist-generated=true @@ -977,7 +974,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java linguist-generated=true @@ -986,15 +982,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectory databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirs.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Move.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Put.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java linguist-generated=true @@ -1027,11 +1019,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGr databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java linguist-generated=true @@ -1079,7 +1069,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermiss databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Patch.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchOp.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignment.java linguist-generated=true @@ -1107,7 +1096,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipa databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java linguist-generated=true @@ -1124,9 +1112,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Authentication 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRuns.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRun.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java linguist-generated=true @@ -1155,9 +1141,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtPlatformTas databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtPlatformTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChange.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponse.java linguist-generated=true @@ -1241,7 +1225,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryI databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetJob.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedConditionTaskValues.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedDbtTaskValues.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedNotebookTaskValues.java linguist-generated=true @@ -1305,7 +1288,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSetting databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerStateProto.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJob.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItem.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewsToExport.java linguist-generated=true @@ -1354,17 +1336,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateP databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefresh.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeltaSharingRecipientType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java linguist-generated=true @@ -1454,7 +1430,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Provide databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java linguist-generated=true @@ -1508,36 +1483,24 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookRes databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineStoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineStoreResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTag.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponse.java linguist-generated=true @@ -1550,10 +1513,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageFeatureSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageOnlineFeature.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreService.java linguist-generated=true @@ -1614,19 +1579,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRe databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatch.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModel.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java linguist-generated=true @@ -1664,6 +1622,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelP databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryEmailSubscriptionType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhookEvent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhookStatus.java linguist-generated=true @@ -1672,9 +1631,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransition databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperiment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRun.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java linguist-generated=true @@ -1696,18 +1653,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResp databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Stage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Status.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java linguist-generated=true @@ -1716,7 +1666,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageR databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java linguist-generated=true @@ -1744,11 +1693,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppInt databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequest.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java linguist-generated=true @@ -1787,9 +1733,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrinc databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true @@ -1797,10 +1741,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigg databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeploymentKind.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLevel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java linguist-generated=true @@ -1862,7 +1804,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFram databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateCause.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponse.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java linguist-generated=true @@ -1896,7 +1837,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Delete databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java linguist-generated=true @@ -1933,14 +1873,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Privat databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessLevel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java linguist-generated=true @@ -1997,7 +1935,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProvi databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInput.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElement.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElementObject.java linguist-generated=true @@ -2145,6 +2082,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNam databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseId.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java linguist-generated=true @@ -2156,6 +2097,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDash databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java linguist-generated=true @@ -2173,7 +2116,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNoti databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequest.java linguist-generated=true @@ -2214,7 +2156,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetw databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressResourceType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java linguist-generated=true @@ -2254,6 +2195,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplia databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultWarehouseIdRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java linguist-generated=true @@ -2339,7 +2281,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalCo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessage.java linguist-generated=true @@ -2347,8 +2288,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java linguist-generated=true @@ -2381,6 +2320,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComp databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWarehouseIdRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java linguist-generated=true @@ -2398,7 +2338,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetw databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java linguist-generated=true @@ -2418,7 +2357,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java linguist-generated=true @@ -2431,7 +2369,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionPar databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java linguist-generated=true @@ -2536,7 +2473,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Service.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java linguist-generated=true @@ -2586,16 +2522,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboard databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequest.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Disposition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlert.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestWarehouseType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPair.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java linguist-generated=true @@ -2693,7 +2625,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigSer databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchema.java linguist-generated=true @@ -2706,10 +2637,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequest.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestSecurityPolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SpotInstancePolicy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/State.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java linguist-generated=true @@ -2720,7 +2649,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementState. 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Status.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SuccessMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java linguist-generated=true @@ -2740,7 +2668,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQuery.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualization.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateWidgetRequest.java linguist-generated=true @@ -2770,9 +2697,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Delete databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java linguist-generated=true @@ -2803,7 +2728,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVe databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java linguist-generated=true @@ -2828,20 +2752,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCre databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Delete.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAcl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecret.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java linguist-generated=true @@ -2864,7 +2781,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCreden databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Language.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponse.java linguist-generated=true @@ -2877,13 +2793,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecre databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Mkdirs.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAcl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecret.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java linguist-generated=true @@ -2904,9 +2817,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsSe databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 23b1bfc2a..dbcc4e7b9 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -72,3 +72,42 @@ * [Breaking] Removed `HIVE_CUSTOM` and `HIVE_SERDE` enum values for `com.databricks.sdk.service.catalog.DataSourceFormat`. * [Breaking] Removed `UNKNOWN_SECURABLE_TYPE` enum value for `com.databricks.sdk.service.catalog.SecurableType`. 
* [Breaking] Removed `CANCELLED`, `ERROR`, `QUEUED`, `RUNNING`, `STARTING` and `SUCCESS` enum values for `com.databricks.sdk.service.jobs.DbtCloudRunStatus`. +* Added `workspaceClient.externalLineage()` service and `workspaceClient.externalMetadata()` service. +* Added `workspaceClient.defaultWarehouseId()` service. +* Added `claims` field for `com.databricks.sdk.service.database.GenerateDatabaseCredentialRequest`. +* Added `activity` field for `com.databricks.sdk.service.ml.DeleteTransitionRequestResponse`. +* Added `maxResults` field for `com.databricks.sdk.service.ml.ListWebhooksRequest`. +* Added `body` and `statusCode` fields for `com.databricks.sdk.service.ml.TestRegistryWebhookResponse`. +* Added `modelVersionDatabricks` field for `com.databricks.sdk.service.ml.TransitionStageResponse`. +* Added `registeredModel` field for `com.databricks.sdk.service.ml.UpdateModelResponse`. +* Added `modelVersion` field for `com.databricks.sdk.service.ml.UpdateModelVersionResponse`. +* Added `webhook` field for `com.databricks.sdk.service.ml.UpdateWebhookResponse`. +* Added `runAs` field for `com.databricks.sdk.service.pipelines.GetPipelineResponse`. +* Added `principal` field for `com.databricks.sdk.service.serving.AiGatewayRateLimit`. +* Added `ANY_STATIC_CREDENTIAL` enum value for `com.databricks.sdk.service.catalog.CredentialType`. +* Added `TABLE_DELTA_ICEBERG_DELTASHARING` enum value for `com.databricks.sdk.service.catalog.SecurableKind`. +* Added `SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION` enum value for `com.databricks.sdk.service.compute.TerminationReasonCode`. +* Added `CAN_CREATE_REGISTERED_MODEL` enum value for `com.databricks.sdk.service.ml.PermissionLevel`. +* Added `BIGQUERY` enum value for `com.databricks.sdk.service.pipelines.IngestionSourceType`. +* Added `SERVICE_PRINCIPAL` and `USER_GROUP` enum values for `com.databricks.sdk.service.serving.AiGatewayRateLimitKey`. +* Added `DELTA_ICEBERG_TABLE` enum value for `com.databricks.sdk.service.sharing.TableInternalAttributesSharedTableType`. +* [Breaking] Changed `deleteTransitionRequest()`, `updateModel()`, `updateModelVersion()` and `updateWebhook()` methods for `workspaceClient.modelRegistry()` service return type to become non-empty. +* [Breaking] Changed `deleteWebhook()` method for `workspaceClient.modelRegistry()` service with new required argument order. +* [Breaking] Changed `fromStage` and `toStage` fields for `com.databricks.sdk.service.ml.Activity` to type `String` class. +* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.ApproveTransitionRequest` to type `String` class. +* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.CreateTransitionRequest` to type `String` class. +* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.DeleteTransitionRequestRequest` to type `String` class. +* [Breaking] Changed `id` field for `com.databricks.sdk.service.ml.DeleteWebhookRequest` to be required. +* [Breaking] Changed `key` field for `com.databricks.sdk.service.ml.FeatureTag` to be required. +* Changed `key` field for `com.databricks.sdk.service.ml.FeatureTag` to be required. +* [Breaking] Changed `capacity` field for `com.databricks.sdk.service.ml.OnlineStore` to be required. +* Changed `capacity` field for `com.databricks.sdk.service.ml.OnlineStore` to be required. +* [Breaking] Changed `onlineTableName` field for `com.databricks.sdk.service.ml.PublishSpec` to be required. 
+* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.RejectTransitionRequest` to type `String` class. +* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.TransitionModelVersionStageDatabricks` to type `String` class. +* [Breaking] Changed `toStage` field for `com.databricks.sdk.service.ml.TransitionRequest` to type `String` class. +* [Breaking] Removed `allowedOptions` and `requiredOptions` fields for `com.databricks.sdk.service.catalog.SecurableKindManifest`. +* [Breaking] Removed `webhook` field for `com.databricks.sdk.service.ml.TestRegistryWebhookResponse`. +* [Breaking] Removed `modelVersion` field for `com.databricks.sdk.service.ml.TransitionStageResponse`. +* [Breaking] Removed `ARCHIVED`, `NONE`, `PRODUCTION` and `STAGING` enum values for `com.databricks.sdk.service.ml.DeleteTransitionRequestStage`. +* [Breaking] Removed `ARCHIVED`, `NONE`, `PRODUCTION` and `STAGING` enum values for `com.databricks.sdk.service.ml.Stage`. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index bf4f6e180..0f4053b8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -20,8 +20,12 @@ import com.databricks.sdk.service.catalog.ConnectionsService; import com.databricks.sdk.service.catalog.CredentialsAPI; import com.databricks.sdk.service.catalog.CredentialsService; +import com.databricks.sdk.service.catalog.ExternalLineageAPI; +import com.databricks.sdk.service.catalog.ExternalLineageService; import com.databricks.sdk.service.catalog.ExternalLocationsAPI; import com.databricks.sdk.service.catalog.ExternalLocationsService; +import com.databricks.sdk.service.catalog.ExternalMetadataAPI; +import com.databricks.sdk.service.catalog.ExternalMetadataService; import com.databricks.sdk.service.catalog.FunctionsAPI; import com.databricks.sdk.service.catalog.FunctionsService; import com.databricks.sdk.service.catalog.GrantsAPI; @@ -254,7 +258,9 @@ public class WorkspaceClient { private DbfsExt dbfsAPI; private DbsqlPermissionsAPI dbsqlPermissionsAPI; private ExperimentsAPI experimentsAPI; + private ExternalLineageAPI externalLineageAPI; private ExternalLocationsAPI externalLocationsAPI; + private ExternalMetadataAPI externalMetadataAPI; private FeatureStoreAPI featureStoreAPI; private FilesAPI filesAPI; private FunctionsAPI functionsAPI; @@ -367,7 +373,9 @@ public WorkspaceClient(DatabricksConfig config) { dbfsAPI = new DbfsExt(apiClient); dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); experimentsAPI = new ExperimentsAPI(apiClient); + externalLineageAPI = new ExternalLineageAPI(apiClient); externalLocationsAPI = new ExternalLocationsAPI(apiClient); + externalMetadataAPI = new ExternalMetadataAPI(apiClient); featureStoreAPI = new FeatureStoreAPI(apiClient); filesAPI = new FilesAPI(apiClient); functionsAPI = new FunctionsAPI(apiClient); @@ -550,7 +558,7 @@ public CleanRoomTaskRunsAPI cleanRoomTaskRuns() { /** * A clean room uses Delta Sharing and serverless compute to provide a secure and * privacy-protecting environment where multiple parties can work together on sensitive enterprise - * data without direct access to each other’s data. + * data without direct access to each other's data. 
*/ public CleanRoomsAPI cleanRooms() { return cleanRoomsAPI; @@ -778,6 +786,18 @@ public ExperimentsAPI experiments() { return experimentsAPI; } + /** + * External Lineage APIs enable defining and managing lineage relationships between Databricks + * objects and external systems. These APIs allow users to capture data flows connecting + * Databricks tables, models, and file paths with external metadata objects. + * + *
<p>
With these APIs, users can create, update, delete, and list lineage relationships with + * support for column-level mappings and custom properties. + */ + public ExternalLineageAPI externalLineage() { + return externalLineageAPI; + } + /** * An external location is an object that combines a cloud storage path with a storage credential * that authorizes access to the cloud storage path. Each external location is subject to Unity @@ -795,6 +815,18 @@ public ExternalLocationsAPI externalLocations() { return externalLocationsAPI; } + /** + * External Metadata objects enable customers to register and manage metadata about external + * systems within Unity Catalog. + * + *
<p>
These APIs provide a standardized way to create, update, retrieve, list, and delete external + * metadata objects. Fine-grained authorization ensures that only users with appropriate + * permissions can view and manage external metadata objects. + */ + public ExternalMetadataAPI externalMetadata() { + return externalMetadataAPI; + } + /** * A feature store is a centralized repository that enables data scientists to find and share * features. Using a feature store also ensures that the code used to compute feature values is @@ -2162,6 +2194,17 @@ public WorkspaceClient withExperimentsAPI(ExperimentsAPI experiments) { return this; } + /** Replace the default ExternalLineageService with a custom implementation. */ + public WorkspaceClient withExternalLineageImpl(ExternalLineageService externalLineage) { + return this.withExternalLineageAPI(new ExternalLineageAPI(externalLineage)); + } + + /** Replace the default ExternalLineageAPI with a custom implementation. */ + public WorkspaceClient withExternalLineageAPI(ExternalLineageAPI externalLineage) { + this.externalLineageAPI = externalLineage; + return this; + } + /** Replace the default ExternalLocationsService with a custom implementation. */ public WorkspaceClient withExternalLocationsImpl(ExternalLocationsService externalLocations) { return this.withExternalLocationsAPI(new ExternalLocationsAPI(externalLocations)); @@ -2173,6 +2216,17 @@ public WorkspaceClient withExternalLocationsAPI(ExternalLocationsAPI externalLoc return this; } + /** Replace the default ExternalMetadataService with a custom implementation. */ + public WorkspaceClient withExternalMetadataImpl(ExternalMetadataService externalMetadata) { + return this.withExternalMetadataAPI(new ExternalMetadataAPI(externalMetadata)); + } + + /** Replace the default ExternalMetadataAPI with a custom implementation. */ + public WorkspaceClient withExternalMetadataAPI(ExternalMetadataAPI externalMetadata) { + this.externalMetadataAPI = externalMetadata; + return this; + } + /** Replace the default FeatureStoreService with a custom implementation. 
*/ public WorkspaceClient withFeatureStoreImpl(FeatureStoreService featureStore) { return this.withFeatureStoreAPI(new FeatureStoreAPI(featureStore)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java index 4b7e6f09c..112c28fb6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java @@ -24,7 +24,7 @@ public void cancelOptimize(CancelCustomLlmOptimizationRunRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, CancelOptimizeResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -51,7 +51,7 @@ public void deleteCustomLlm(DeleteCustomLlmRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteCustomLlmResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java index 40dc96f30..19ef6f88c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java @@ -13,7 +13,7 @@ public class AppAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private AppPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java index 39fcd6726..2388ba9d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java @@ -18,7 +18,7 @@ public class AppPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private AppPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java index 94fb8781f..9acd12321 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java @@ -13,7 +13,7 @@ public class AppPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private AppPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java index 06ac40723..c89697997 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java @@ -10,7 +10,7 @@ @Generated public class CreateAppDeploymentRequest { - /** */ + /** The app deployment configuration. */ @JsonProperty("app_deployment") private AppDeployment appDeployment; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java index dcf84cf42..dcf526b90 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java @@ -41,7 +41,7 @@ public void delete(DeleteBudgetPolicyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java index 7bf0119d8..fa1849308 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java @@ -40,7 +40,7 @@ public void delete(DeleteBudgetConfigurationRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteBudgetConfigurationResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java deleted file mode 100755 index 32bb2a520..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.billing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteBudgetConfigurationResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteBudgetConfigurationResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java index 187955234..9efcfe94f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java @@ -72,7 +72,7 @@ public void patchStatus(UpdateLogDeliveryConfigurationStatusRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PatchStatusResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java deleted file mode 100755 index aa3dee4e9..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.billing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class PatchStatusResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(PatchStatusResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java index 42ae51679..baafb73c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java @@ -16,7 +16,11 @@ public class UpdateBudgetPolicyRequest { @QueryParam("limit_config") private LimitConfig limitConfig; - /** Contains the BudgetPolicy details. */ + /** + * The policy to update. `creator_user_id` cannot be specified in the request. All other fields + * must be specified even if not changed. The `policy_id` is used to identify the policy to + * update. 
+ */ @JsonProperty("policy") private BudgetPolicy policy; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java index 1cf2ed48e..aa9eb7cb8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java @@ -10,7 +10,7 @@ /** * Properties of the new log delivery configuration. */ @Generated public class WrappedCreateLogDeliveryConfiguration { - /** * Log Delivery Configuration */ + /** */ @JsonProperty("log_delivery_configuration") private CreateLogDeliveryConfigurationParams logDeliveryConfiguration; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java index ad7867175..f0418c91e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java @@ -27,7 +27,7 @@ public void create(AccountsCreateMetastoreAssignment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, CreateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -43,7 +43,7 @@ public void delete(DeleteAccountMetastoreAssignmentRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -93,7 +93,7 @@ public void update(AccountsUpdateMetastoreAssignment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java index b8a166770..56328c4f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java @@ -40,7 +40,7 @@ public void delete(DeleteAccountMetastoreRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java index fdb1c75e5..5eb10df59 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java @@ -45,7 +45,7 @@ public void delete(DeleteAccountStorageCredentialRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java index ebf035131..2b036538e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java @@ -15,7 +15,7 @@ public class AwsSqsQueue { /** * The AQS queue url in the format https://sqs.{region}.amazonaws.com/{account id}/{queue name} - * REQUIRED for provided_sqs. + * Required for provided_sqs. */ @JsonProperty("queue_url") private String queueUrl; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java index 060de1960..3987c0305 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java @@ -15,21 +15,21 @@ public class AzureQueueStorage { /** * The AQS queue url in the format https://{storage account}.queue.core.windows.net/{queue name} - * REQUIRED for provided_aqs. + * Required for provided_aqs. */ @JsonProperty("queue_url") private String queueUrl; /** * The resource group for the queue, event grid subscription, and external location storage - * account. ONLY REQUIRED for locations with a service principal storage credential + * account. Only required for locations with a service principal storage credential */ @JsonProperty("resource_group") private String resourceGroup; /** - * OPTIONAL: The subscription id for the queue, event grid subscription, and external location - * storage account. REQUIRED for locations with a service principal storage credential + * Optional subscription id for the queue, event grid subscription, and external location storage + * account. Required for locations with a service principal storage credential */ @JsonProperty("subscription_id") private String subscriptionId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java deleted file mode 100755 index 90b729a13..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class CancelRefreshResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(CancelRefreshResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java index ccdc57264..170f10432 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java @@ -17,7 +17,7 @@ public class CatalogInfo { @JsonProperty("browse_only") private Boolean browseOnly; - /** The type of the catalog. */ + /** */ @JsonProperty("catalog_type") private CatalogType catalogType; @@ -85,11 +85,11 @@ public class CatalogInfo { @JsonProperty("provider_name") private String providerName; - /** Status of an asynchronously provisioned resource. */ + /** */ @JsonProperty("provisioning_info") private ProvisioningInfo provisioningInfo; - /** The type of Unity Catalog securable. */ + /** */ @JsonProperty("securable_type") private SecurableType securableType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java index 29649052b..1149a8b92 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteCatalogRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnRelationship.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnRelationship.java new file mode 100755 index 000000000..f08281b4c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnRelationship.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ColumnRelationship { + /** */ + @JsonProperty("source") + private String source; + + /** */ + @JsonProperty("target") + private String target; + + public ColumnRelationship setSource(String source) { + this.source = source; + return this; + } + + public String getSource() { + return source; + } + + public ColumnRelationship setTarget(String target) { + this.target = target; + return this; + } + + public String getTarget() { + return target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ColumnRelationship that = (ColumnRelationship) o; + return Objects.equals(source, that.source) && Objects.equals(target, that.target); + } + + @Override + public int hashCode() { + return Objects.hash(source, target); + } + + @Override + public String toString() { + return new ToStringer(ColumnRelationship.class) + .add("source", source) + .add("target", target) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java index 496800340..8784fa394 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java @@ -58,7 +58,7 @@ public class ConnectionInfo { @JsonProperty("properties") private Map properties; - /** Status of an asynchronously provisioned resource. */ + /** */ @JsonProperty("provisioning_info") private ProvisioningInfo provisioningInfo; @@ -66,7 +66,7 @@ public class ConnectionInfo { @JsonProperty("read_only") private Boolean readOnly; - /** The type of Unity Catalog securable. 
*/ + /** */ @JsonProperty("securable_type") private SecurableType securableType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index 0c5d5437b..f9da6f8f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 35 */ +/** Next Id: 36 */ @Generated public enum ConnectionType { BIGQUERY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java index cbd09dede..e0088505f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteConnectionRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLineageRelationshipRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLineageRelationshipRequest.java new file mode 100755 index 000000000..dc3956c72 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLineageRelationshipRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateExternalLineageRelationshipRequest { + /** */ + @JsonProperty("external_lineage_relationship") + private CreateRequestExternalLineage externalLineageRelationship; + + public CreateExternalLineageRelationshipRequest setExternalLineageRelationship( + CreateRequestExternalLineage externalLineageRelationship) { + this.externalLineageRelationship = externalLineageRelationship; + return this; + } + + public CreateRequestExternalLineage getExternalLineageRelationship() { + return externalLineageRelationship; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExternalLineageRelationshipRequest that = (CreateExternalLineageRelationshipRequest) o; + return Objects.equals(externalLineageRelationship, that.externalLineageRelationship); + } + + @Override + public int hashCode() { + return Objects.hash(externalLineageRelationship); + } + + @Override + public String toString() { + return new ToStringer(CreateExternalLineageRelationshipRequest.class) + .add("externalLineageRelationship", externalLineageRelationship) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java index 585c5876d..f2a8fb83c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java @@ -17,11 +17,11 @@ public class CreateExternalLocation { @JsonProperty("credential_name") private String credentialName; - /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */ + /** Whether to enable file events on this external location. */ @JsonProperty("enable_file_events") private Boolean enableFileEvents; - /** Encryption options that apply to clients connecting to cloud storage. */ + /** */ @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; @@ -33,7 +33,7 @@ public class CreateExternalLocation { @JsonProperty("fallback") private Boolean fallback; - /** [Create:OPT Update:OPT] File event queue settings. */ + /** File event queue settings. */ @JsonProperty("file_event_queue") private FileEventQueue fileEventQueue; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalMetadataRequest.java new file mode 100755 index 000000000..46c9bc20d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalMetadataRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateExternalMetadataRequest { + /** */ + @JsonProperty("external_metadata") + private ExternalMetadata externalMetadata; + + public CreateExternalMetadataRequest setExternalMetadata(ExternalMetadata externalMetadata) { + this.externalMetadata = externalMetadata; + return this; + } + + public ExternalMetadata getExternalMetadata() { + return externalMetadata; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExternalMetadataRequest that = (CreateExternalMetadataRequest) o; + return Objects.equals(externalMetadata, that.externalMetadata); + } + + @Override + public int hashCode() { + return Objects.hash(externalMetadata); + } + + @Override + public String toString() { + return new ToStringer(CreateExternalMetadataRequest.class) + .add("externalMetadata", externalMetadata) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java index c58e4bd9e..fd171c270 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java @@ -9,7 +9,7 @@ @Generated public class CreateOnlineTableRequest { - /** Online Table information. */ + /** Specification of the online table to be created. */ @JsonProperty("table") private OnlineTable table; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRequestExternalLineage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRequestExternalLineage.java new file mode 100755 index 000000000..c8ba0491e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRequestExternalLineage.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class CreateRequestExternalLineage { + /** List of column relationships between source and target objects. */ + @JsonProperty("columns") + private Collection columns; + + /** Unique identifier of the external lineage relationship. */ + @JsonProperty("id") + private String id; + + /** Key-value properties associated with the external lineage relationship. */ + @JsonProperty("properties") + private Map properties; + + /** Source object of the external lineage relationship. */ + @JsonProperty("source") + private ExternalLineageObject source; + + /** Target object of the external lineage relationship. 
*/ + @JsonProperty("target") + private ExternalLineageObject target; + + public CreateRequestExternalLineage setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + public CreateRequestExternalLineage setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CreateRequestExternalLineage setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public CreateRequestExternalLineage setSource(ExternalLineageObject source) { + this.source = source; + return this; + } + + public ExternalLineageObject getSource() { + return source; + } + + public CreateRequestExternalLineage setTarget(ExternalLineageObject target) { + this.target = target; + return this; + } + + public ExternalLineageObject getTarget() { + return target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRequestExternalLineage that = (CreateRequestExternalLineage) o; + return Objects.equals(columns, that.columns) + && Objects.equals(id, that.id) + && Objects.equals(properties, that.properties) + && Objects.equals(source, that.source) + && Objects.equals(target, that.target); + } + + @Override + public int hashCode() { + return Objects.hash(columns, id, properties, source, target); + } + + @Override + public String toString() { + return new ToStringer(CreateRequestExternalLineage.class) + .add("columns", columns) + .add("id", id) + .add("properties", properties) + .add("source", source) + .add("target", target) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java index d8cbfdde8..0438f8c7f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java @@ -9,10 +9,7 @@ @Generated public class CreateTableConstraint { - /** - * A table constraint, as defined by *one* of the following fields being set: - * __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. - */ + /** */ @JsonProperty("constraint") private TableConstraint constraint; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java index bbe39faf4..16f0ebbc6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java @@ -29,13 +29,7 @@ public class CreateVolumeRequestContent { @JsonProperty("storage_location") private String storageLocation; - /** - * The type of the volume. An external volume is located in the specified external location. A - * managed volume is located in the default location which is specified by the parent schema, or - * the parent catalog, or the Metastore. [Learn more] - * - *

[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external - */ + /** */ @JsonProperty("volume_type") private VolumeType volumeType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java index b5f06caf4..6172c00db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java @@ -4,9 +4,10 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 12 */ +/** Next Id: 13 */ @Generated public enum CredentialType { + ANY_STATIC_CREDENTIAL, BEARER_TOKEN, OAUTH_ACCESS_TOKEN, OAUTH_M2M, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java index 1557d0944..ebcce0da4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java @@ -37,7 +37,7 @@ public void deleteCredential(DeleteCredentialRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteCredentialResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java deleted file mode 100755 index 1ad278759..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteCredentialResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteCredentialResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipRequest.java new file mode 100755 index 000000000..d74cea315 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipRequest.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteExternalLineageRelationshipRequest { + /** */ + @JsonIgnore + @QueryParam("external_lineage_relationship") + private DeleteRequestExternalLineage externalLineageRelationship; + + public DeleteExternalLineageRelationshipRequest setExternalLineageRelationship( + DeleteRequestExternalLineage externalLineageRelationship) { + this.externalLineageRelationship = externalLineageRelationship; + return this; + } + + public DeleteRequestExternalLineage getExternalLineageRelationship() { + return externalLineageRelationship; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteExternalLineageRelationshipRequest that = (DeleteExternalLineageRelationshipRequest) o; + return Objects.equals(externalLineageRelationship, that.externalLineageRelationship); + } + + @Override + public int hashCode() { + return Objects.hash(externalLineageRelationship); + } + + @Override + public String toString() { + return new ToStringer(DeleteExternalLineageRelationshipRequest.class) + .add("externalLineageRelationship", externalLineageRelationship) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipResponse.java similarity index 78% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipResponse.java index 9bb22645b..dcab20f54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipResponse.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class DeleteAliasResponse { +public class DeleteExternalLineageRelationshipResponse { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DeleteAliasResponse.class).toString(); + return new ToStringer(DeleteExternalLineageRelationshipResponse.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataRequest.java new file mode 100755 index 000000000..f1055606d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteExternalMetadataRequest { + /** */ + @JsonIgnore private String name; + + public DeleteExternalMetadataRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteExternalMetadataRequest that = (DeleteExternalMetadataRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteExternalMetadataRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataResponse.java similarity index 81% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataResponse.java index c2be0e3c2..836fc0483 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataResponse.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class CreateResponse { +public class DeleteExternalMetadataResponse { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(CreateResponse.class).toString(); + return new ToStringer(DeleteExternalMetadataResponse.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java new file mode 100755 index 000000000..4a6d348b8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DeleteRequestExternalLineage { + /** Unique identifier of the external lineage relationship. */ + @JsonProperty("id") + @QueryParam("id") + private String id; + + /** Source object of the external lineage relationship. */ + @JsonProperty("source") + @QueryParam("source") + private ExternalLineageObject source; + + /** Target object of the external lineage relationship. 
*/ + @JsonProperty("target") + @QueryParam("target") + private ExternalLineageObject target; + + public DeleteRequestExternalLineage setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public DeleteRequestExternalLineage setSource(ExternalLineageObject source) { + this.source = source; + return this; + } + + public ExternalLineageObject getSource() { + return source; + } + + public DeleteRequestExternalLineage setTarget(ExternalLineageObject target) { + this.target = target; + return this; + } + + public ExternalLineageObject getTarget() { + return target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteRequestExternalLineage that = (DeleteRequestExternalLineage) o; + return Objects.equals(id, that.id) + && Objects.equals(source, that.source) + && Objects.equals(target, that.target); + } + + @Override + public int hashCode() { + return Objects.hash(id, source, target); + } + + @Override + public String toString() { + return new ToStringer(DeleteRequestExternalLineage.class) + .add("id", id) + .add("source", source) + .add("target", target) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java deleted file mode 100755 index 72c77810c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java index 5ace4876d..11a937f7d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java @@ -13,19 +13,19 @@ */ @Generated public class Dependency { - /** A connection that is dependent on a SQL object. */ + /** */ @JsonProperty("connection") private ConnectionDependency connection; - /** A credential that is dependent on a SQL object. */ + /** */ @JsonProperty("credential") private CredentialDependency credential; - /** A function that is dependent on a SQL object. */ + /** */ @JsonProperty("function") private FunctionDependency function; - /** A table that is dependent on a SQL object. 
*/ + /** */ @JsonProperty("table") private TableDependency table; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java deleted file mode 100755 index 682492951..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DisableResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DisableResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java deleted file mode 100755 index 72ccaf810..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class EnableResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(EnableResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java new file mode 100755 index 000000000..4fc7f867b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java @@ -0,0 +1,112 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * External Lineage APIs enable defining and managing lineage relationships between Databricks + * objects and external systems. These APIs allow users to capture data flows connecting Databricks + * tables, models, and file paths with external metadata objects. + * + *

With these APIs, users can create, update, delete, and list lineage relationships with support + * for column-level mappings and custom properties. + */ +@Generated +public class ExternalLineageAPI { + private static final Logger LOG = LoggerFactory.getLogger(ExternalLineageAPI.class); + + private final ExternalLineageService impl; + + /** Regular-use constructor */ + public ExternalLineageAPI(ApiClient apiClient) { + impl = new ExternalLineageImpl(apiClient); + } + + /** Constructor for mocks */ + public ExternalLineageAPI(ExternalLineageService mock) { + impl = mock; + } + + public ExternalLineageRelationship createExternalLineageRelationship( + CreateRequestExternalLineage externalLineageRelationship) { + return createExternalLineageRelationship( + new CreateExternalLineageRelationshipRequest() + .setExternalLineageRelationship(externalLineageRelationship)); + } + + /** + * Creates an external lineage relationship between a Databricks or external metadata object and + * another external metadata object. + */ + public ExternalLineageRelationship createExternalLineageRelationship( + CreateExternalLineageRelationshipRequest request) { + return impl.createExternalLineageRelationship(request); + } + + public void deleteExternalLineageRelationship( + DeleteRequestExternalLineage externalLineageRelationship) { + deleteExternalLineageRelationship( + new DeleteExternalLineageRelationshipRequest() + .setExternalLineageRelationship(externalLineageRelationship)); + } + + /** + * Deletes an external lineage relationship between a Databricks or external metadata object and + * another external metadata object. + */ + public void deleteExternalLineageRelationship(DeleteExternalLineageRelationshipRequest request) { + impl.deleteExternalLineageRelationship(request); + } + + public Iterable listExternalLineageRelationships( + ExternalLineageObject objectInfo, LineageDirection lineageDirection) { + return listExternalLineageRelationships( + new ListExternalLineageRelationshipsRequest() + .setObjectInfo(objectInfo) + .setLineageDirection(lineageDirection)); + } + + /** + * Lists external lineage relationships of a Databricks object or external metadata given a + * supplied direction. + */ + public Iterable listExternalLineageRelationships( + ListExternalLineageRelationshipsRequest request) { + return new Paginator<>( + request, + impl::listExternalLineageRelationships, + ListExternalLineageRelationshipsResponse::getExternalLineageRelationships, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public ExternalLineageRelationship updateExternalLineageRelationship( + UpdateRequestExternalLineage externalLineageRelationship, String updateMask) { + return updateExternalLineageRelationship( + new UpdateExternalLineageRelationshipRequest() + .setExternalLineageRelationship(externalLineageRelationship) + .setUpdateMask(updateMask)); + } + + /** + * Updates an external lineage relationship between a Databricks or external metadata object and + * another external metadata object. 
+ */ + public ExternalLineageRelationship updateExternalLineageRelationship( + UpdateExternalLineageRelationshipRequest request) { + return impl.updateExternalLineageRelationship(request); + } + + public ExternalLineageService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadata.java new file mode 100755 index 000000000..9fe27b0fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadata.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ExternalLineageExternalMetadata { + /** */ + @JsonProperty("name") + private String name; + + public ExternalLineageExternalMetadata setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageExternalMetadata that = (ExternalLineageExternalMetadata) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageExternalMetadata.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadataInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadataInfo.java new file mode 100755 index 000000000..7730b3b3f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadataInfo.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents the external metadata object in the lineage event. */ +@Generated +public class ExternalLineageExternalMetadataInfo { + /** Type of entity represented by the external metadata object. */ + @JsonProperty("entity_type") + private String entityType; + + /** Timestamp of the lineage event. */ + @JsonProperty("event_time") + private String eventTime; + + /** Name of the external metadata object. */ + @JsonProperty("name") + private String name; + + /** Type of external system. 
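Illustrative usage note (not part of the generated diff): the ExternalLineageAPI added above can be driven directly from an ApiClient using only constructors and setters that appear in this change. The sketch below is a non-authoritative example; the column names, the property values, and the assumption that ExternalLineageObject is a plain generated POJO populated via its own setters (defined outside this excerpt) are placeholders, so real calls would need the source and target objects filled in accordingly.

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.service.catalog.ColumnRelationship;
import com.databricks.sdk.service.catalog.CreateRequestExternalLineage;
import com.databricks.sdk.service.catalog.DeleteRequestExternalLineage;
import com.databricks.sdk.service.catalog.ExternalLineageAPI;
import com.databricks.sdk.service.catalog.ExternalLineageObject;
import com.databricks.sdk.service.catalog.ExternalLineageRelationship;
import java.util.Arrays;
import java.util.Collections;

public class ExternalLineageUsageSketch {

  // Creates an external lineage relationship with a single column mapping and one
  // custom property. ExternalLineageObject's fields are defined outside this diff,
  // so the source/target objects here are placeholders the caller must populate.
  public static ExternalLineageRelationship createRelationship(ApiClient apiClient) {
    ExternalLineageAPI externalLineage = new ExternalLineageAPI(apiClient);

    ExternalLineageObject source = new ExternalLineageObject(); // populate per its schema
    ExternalLineageObject target = new ExternalLineageObject(); // populate per its schema

    CreateRequestExternalLineage relationship =
        new CreateRequestExternalLineage()
            .setSource(source)
            .setTarget(target)
            .setColumns(
                Arrays.asList(
                    new ColumnRelationship().setSource("order_id").setTarget("order_id")))
            .setProperties(Collections.singletonMap("pipeline", "nightly-etl"));

    // POSTs to /api/2.0/lineage-tracking/external-lineage via ExternalLineageImpl.
    return externalLineage.createExternalLineageRelationship(relationship);
  }

  // Deletes a relationship by its unique identifier; the identifying fields are sent
  // as query parameters on the DELETE call, per DeleteRequestExternalLineage above.
  public static void deleteRelationship(ApiClient apiClient, String relationshipId) {
    new ExternalLineageAPI(apiClient)
        .deleteExternalLineageRelationship(
            new DeleteRequestExternalLineage().setId(relationshipId));
  }
}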
*/ + @JsonProperty("system_type") + private SystemType systemType; + + public ExternalLineageExternalMetadataInfo setEntityType(String entityType) { + this.entityType = entityType; + return this; + } + + public String getEntityType() { + return entityType; + } + + public ExternalLineageExternalMetadataInfo setEventTime(String eventTime) { + this.eventTime = eventTime; + return this; + } + + public String getEventTime() { + return eventTime; + } + + public ExternalLineageExternalMetadataInfo setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExternalLineageExternalMetadataInfo setSystemType(SystemType systemType) { + this.systemType = systemType; + return this; + } + + public SystemType getSystemType() { + return systemType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageExternalMetadataInfo that = (ExternalLineageExternalMetadataInfo) o; + return Objects.equals(entityType, that.entityType) + && Objects.equals(eventTime, that.eventTime) + && Objects.equals(name, that.name) + && Objects.equals(systemType, that.systemType); + } + + @Override + public int hashCode() { + return Objects.hash(entityType, eventTime, name, systemType); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageExternalMetadataInfo.class) + .add("entityType", entityType) + .add("eventTime", eventTime) + .add("name", name) + .add("systemType", systemType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageFileInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageFileInfo.java new file mode 100755 index 000000000..ec9ba66e5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageFileInfo.java @@ -0,0 +1,105 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents the path information in the lineage event. */ +@Generated +public class ExternalLineageFileInfo { + /** Timestamp of the lineage event. */ + @JsonProperty("event_time") + private String eventTime; + + /** URL of the path. */ + @JsonProperty("path") + private String path; + + /** The full name of the securable on the path. */ + @JsonProperty("securable_name") + private String securableName; + + /** The securable type of the securable on the path. */ + @JsonProperty("securable_type") + private String securableType; + + /** The storage location associated with securable on the path. 
*/ + @JsonProperty("storage_location") + private String storageLocation; + + public ExternalLineageFileInfo setEventTime(String eventTime) { + this.eventTime = eventTime; + return this; + } + + public String getEventTime() { + return eventTime; + } + + public ExternalLineageFileInfo setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public ExternalLineageFileInfo setSecurableName(String securableName) { + this.securableName = securableName; + return this; + } + + public String getSecurableName() { + return securableName; + } + + public ExternalLineageFileInfo setSecurableType(String securableType) { + this.securableType = securableType; + return this; + } + + public String getSecurableType() { + return securableType; + } + + public ExternalLineageFileInfo setStorageLocation(String storageLocation) { + this.storageLocation = storageLocation; + return this; + } + + public String getStorageLocation() { + return storageLocation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageFileInfo that = (ExternalLineageFileInfo) o; + return Objects.equals(eventTime, that.eventTime) + && Objects.equals(path, that.path) + && Objects.equals(securableName, that.securableName) + && Objects.equals(securableType, that.securableType) + && Objects.equals(storageLocation, that.storageLocation); + } + + @Override + public int hashCode() { + return Objects.hash(eventTime, path, securableName, securableType, storageLocation); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageFileInfo.class) + .add("eventTime", eventTime) + .add("path", path) + .add("securableName", securableName) + .add("securableType", securableType) + .add("storageLocation", storageLocation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageImpl.java new file mode 100755 index 000000000..3748d87ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageImpl.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of ExternalLineage */ +@Generated +class ExternalLineageImpl implements ExternalLineageService { + private final ApiClient apiClient; + + public ExternalLineageImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public ExternalLineageRelationship createExternalLineageRelationship( + CreateExternalLineageRelationshipRequest request) { + String path = "/api/2.0/lineage-tracking/external-lineage"; + try { + Request req = + new Request("POST", path, apiClient.serialize(request.getExternalLineageRelationship())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ExternalLineageRelationship.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteExternalLineageRelationship(DeleteExternalLineageRelationshipRequest request) { + String path = "/api/2.0/lineage-tracking/external-lineage"; + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListExternalLineageRelationshipsResponse listExternalLineageRelationships( + ListExternalLineageRelationshipsRequest request) { + String path = "/api/2.0/lineage-tracking/external-lineage"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListExternalLineageRelationshipsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ExternalLineageRelationship updateExternalLineageRelationship( + UpdateExternalLineageRelationshipRequest request) { + String path = "/api/2.0/lineage-tracking/external-lineage"; + try { + Request req = + new Request("PATCH", path, apiClient.serialize(request.getExternalLineageRelationship())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ExternalLineageRelationship.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageInfo.java new file mode 100755 index 000000000..73d2ec2dd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageInfo.java @@ -0,0 +1,107 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Lineage response containing lineage information of a data asset. 
*/ +@Generated +public class ExternalLineageInfo { + /** Information about the edge metadata of the external lineage relationship. */ + @JsonProperty("external_lineage_info") + private ExternalLineageRelationshipInfo externalLineageInfo; + + /** Information about external metadata involved in the lineage relationship. */ + @JsonProperty("external_metadata_info") + private ExternalLineageExternalMetadataInfo externalMetadataInfo; + + /** Information about the file involved in the lineage relationship. */ + @JsonProperty("file_info") + private ExternalLineageFileInfo fileInfo; + + /** Information about the model version involved in the lineage relationship. */ + @JsonProperty("model_info") + private ExternalLineageModelVersionInfo modelInfo; + + /** Information about the table involved in the lineage relationship. */ + @JsonProperty("table_info") + private ExternalLineageTableInfo tableInfo; + + public ExternalLineageInfo setExternalLineageInfo( + ExternalLineageRelationshipInfo externalLineageInfo) { + this.externalLineageInfo = externalLineageInfo; + return this; + } + + public ExternalLineageRelationshipInfo getExternalLineageInfo() { + return externalLineageInfo; + } + + public ExternalLineageInfo setExternalMetadataInfo( + ExternalLineageExternalMetadataInfo externalMetadataInfo) { + this.externalMetadataInfo = externalMetadataInfo; + return this; + } + + public ExternalLineageExternalMetadataInfo getExternalMetadataInfo() { + return externalMetadataInfo; + } + + public ExternalLineageInfo setFileInfo(ExternalLineageFileInfo fileInfo) { + this.fileInfo = fileInfo; + return this; + } + + public ExternalLineageFileInfo getFileInfo() { + return fileInfo; + } + + public ExternalLineageInfo setModelInfo(ExternalLineageModelVersionInfo modelInfo) { + this.modelInfo = modelInfo; + return this; + } + + public ExternalLineageModelVersionInfo getModelInfo() { + return modelInfo; + } + + public ExternalLineageInfo setTableInfo(ExternalLineageTableInfo tableInfo) { + this.tableInfo = tableInfo; + return this; + } + + public ExternalLineageTableInfo getTableInfo() { + return tableInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageInfo that = (ExternalLineageInfo) o; + return Objects.equals(externalLineageInfo, that.externalLineageInfo) + && Objects.equals(externalMetadataInfo, that.externalMetadataInfo) + && Objects.equals(fileInfo, that.fileInfo) + && Objects.equals(modelInfo, that.modelInfo) + && Objects.equals(tableInfo, that.tableInfo); + } + + @Override + public int hashCode() { + return Objects.hash(externalLineageInfo, externalMetadataInfo, fileInfo, modelInfo, tableInfo); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageInfo.class) + .add("externalLineageInfo", externalLineageInfo) + .add("externalMetadataInfo", externalMetadataInfo) + .add("fileInfo", fileInfo) + .add("modelInfo", modelInfo) + .add("tableInfo", tableInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersion.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersion.java new file mode 100755 index 000000000..3809dec2b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersion.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
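// Illustrative usage sketch (hand-written, not part of the generated sources above).
// The generated model classes such as ExternalLineageFileInfo and ExternalLineageInfo
// use fluent setters that return `this`, so a lineage payload can be assembled inline.
// All field values below are made-up placeholders.
import com.databricks.sdk.service.catalog.ExternalLineageFileInfo;
import com.databricks.sdk.service.catalog.ExternalLineageInfo;

public class ExternalLineageInfoExample {
  public static void main(String[] args) {
    // Describe a file involved in a lineage event.
    ExternalLineageFileInfo fileInfo =
        new ExternalLineageFileInfo()
            .setEventTime("2024-01-01T00:00:00Z")
            .setPath("s3://my-bucket/landing/orders.csv")
            .setStorageLocation("s3://my-bucket/landing");

    // Wrap it in the lineage response container.
    ExternalLineageInfo info = new ExternalLineageInfo().setFileInfo(fileInfo);

    // ToStringer-based toString() prints only the populated fields.
    System.out.println(info);
  }
}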
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ExternalLineageModelVersion { + /** */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("version") + private String version; + + public ExternalLineageModelVersion setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExternalLineageModelVersion setVersion(String version) { + this.version = version; + return this; + } + + public String getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageModelVersion that = (ExternalLineageModelVersion) o; + return Objects.equals(name, that.name) && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(name, version); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageModelVersion.class) + .add("name", name) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersionInfo.java new file mode 100755 index 000000000..1bf1ee15b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersionInfo.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents the model version information in the lineage event. */ +@Generated +public class ExternalLineageModelVersionInfo { + /** Timestamp of the lineage event. */ + @JsonProperty("event_time") + private String eventTime; + + /** Name of the model. */ + @JsonProperty("model_name") + private String modelName; + + /** Version number of the model. 
*/ + @JsonProperty("version") + private Long version; + + public ExternalLineageModelVersionInfo setEventTime(String eventTime) { + this.eventTime = eventTime; + return this; + } + + public String getEventTime() { + return eventTime; + } + + public ExternalLineageModelVersionInfo setModelName(String modelName) { + this.modelName = modelName; + return this; + } + + public String getModelName() { + return modelName; + } + + public ExternalLineageModelVersionInfo setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageModelVersionInfo that = (ExternalLineageModelVersionInfo) o; + return Objects.equals(eventTime, that.eventTime) + && Objects.equals(modelName, that.modelName) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(eventTime, modelName, version); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageModelVersionInfo.class) + .add("eventTime", eventTime) + .add("modelName", modelName) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageObject.java new file mode 100755 index 000000000..209a7b529 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageObject.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ExternalLineageObject { + /** */ + @JsonProperty("external_metadata") + private ExternalLineageExternalMetadata externalMetadata; + + /** */ + @JsonProperty("model_version") + private ExternalLineageModelVersion modelVersion; + + /** */ + @JsonProperty("path") + private ExternalLineagePath path; + + /** */ + @JsonProperty("table") + private ExternalLineageTable table; + + public ExternalLineageObject setExternalMetadata( + ExternalLineageExternalMetadata externalMetadata) { + this.externalMetadata = externalMetadata; + return this; + } + + public ExternalLineageExternalMetadata getExternalMetadata() { + return externalMetadata; + } + + public ExternalLineageObject setModelVersion(ExternalLineageModelVersion modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public ExternalLineageModelVersion getModelVersion() { + return modelVersion; + } + + public ExternalLineageObject setPath(ExternalLineagePath path) { + this.path = path; + return this; + } + + public ExternalLineagePath getPath() { + return path; + } + + public ExternalLineageObject setTable(ExternalLineageTable table) { + this.table = table; + return this; + } + + public ExternalLineageTable getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageObject that = (ExternalLineageObject) o; + return Objects.equals(externalMetadata, that.externalMetadata) + && Objects.equals(modelVersion, that.modelVersion) + && Objects.equals(path, 
that.path) + && Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(externalMetadata, modelVersion, path, table); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageObject.class) + .add("externalMetadata", externalMetadata) + .add("modelVersion", modelVersion) + .add("path", path) + .add("table", table) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineagePath.java similarity index 50% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineagePath.java index d186a2af0..cdeb11b0d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineagePath.java @@ -4,25 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; @Generated -public class AssignResponse { +public class ExternalLineagePath { + /** */ + @JsonProperty("url") + private String url; + + public ExternalLineagePath setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - return true; + ExternalLineagePath that = (ExternalLineagePath) o; + return Objects.equals(url, that.url); } @Override public int hashCode() { - return Objects.hash(); + return Objects.hash(url); } @Override public String toString() { - return new ToStringer(AssignResponse.class).toString(); + return new ToStringer(ExternalLineagePath.class).add("url", url).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationship.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationship.java new file mode 100755 index 000000000..313eedb5a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationship.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class ExternalLineageRelationship { + /** List of column relationships between source and target objects. */ + @JsonProperty("columns") + private Collection columns; + + /** Unique identifier of the external lineage relationship. */ + @JsonProperty("id") + private String id; + + /** Key-value properties associated with the external lineage relationship. */ + @JsonProperty("properties") + private Map properties; + + /** Source object of the external lineage relationship. */ + @JsonProperty("source") + private ExternalLineageObject source; + + /** Target object of the external lineage relationship. 
*/ + @JsonProperty("target") + private ExternalLineageObject target; + + public ExternalLineageRelationship setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + public ExternalLineageRelationship setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ExternalLineageRelationship setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public ExternalLineageRelationship setSource(ExternalLineageObject source) { + this.source = source; + return this; + } + + public ExternalLineageObject getSource() { + return source; + } + + public ExternalLineageRelationship setTarget(ExternalLineageObject target) { + this.target = target; + return this; + } + + public ExternalLineageObject getTarget() { + return target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageRelationship that = (ExternalLineageRelationship) o; + return Objects.equals(columns, that.columns) + && Objects.equals(id, that.id) + && Objects.equals(properties, that.properties) + && Objects.equals(source, that.source) + && Objects.equals(target, that.target); + } + + @Override + public int hashCode() { + return Objects.hash(columns, id, properties, source, target); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageRelationship.class) + .add("columns", columns) + .add("id", id) + .add("properties", properties) + .add("source", source) + .add("target", target) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationshipInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationshipInfo.java new file mode 100755 index 000000000..65a6fd1bd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationshipInfo.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class ExternalLineageRelationshipInfo { + /** List of column relationships between source and target objects. */ + @JsonProperty("columns") + private Collection columns; + + /** Unique identifier of the external lineage relationship. */ + @JsonProperty("id") + private String id; + + /** Key-value properties associated with the external lineage relationship. */ + @JsonProperty("properties") + private Map properties; + + /** Source object of the external lineage relationship. */ + @JsonProperty("source") + private ExternalLineageObject source; + + /** Target object of the external lineage relationship. 
*/ + @JsonProperty("target") + private ExternalLineageObject target; + + public ExternalLineageRelationshipInfo setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + public ExternalLineageRelationshipInfo setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ExternalLineageRelationshipInfo setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public ExternalLineageRelationshipInfo setSource(ExternalLineageObject source) { + this.source = source; + return this; + } + + public ExternalLineageObject getSource() { + return source; + } + + public ExternalLineageRelationshipInfo setTarget(ExternalLineageObject target) { + this.target = target; + return this; + } + + public ExternalLineageObject getTarget() { + return target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageRelationshipInfo that = (ExternalLineageRelationshipInfo) o; + return Objects.equals(columns, that.columns) + && Objects.equals(id, that.id) + && Objects.equals(properties, that.properties) + && Objects.equals(source, that.source) + && Objects.equals(target, that.target); + } + + @Override + public int hashCode() { + return Objects.hash(columns, id, properties, source, target); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageRelationshipInfo.class) + .add("columns", columns) + .add("id", id) + .add("properties", properties) + .add("source", source) + .add("target", target) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageService.java new file mode 100755 index 000000000..dc698c9c7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageService.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * External Lineage APIs enable defining and managing lineage relationships between Databricks + * objects and external systems. These APIs allow users to capture data flows connecting Databricks + * tables, models, and file paths with external metadata objects. + * + *
<p>
With these APIs, users can create, update, delete, and list lineage relationships with support + * for column-level mappings and custom properties. + * + *
<p>
This is the high-level interface that contains generated methods. + * + *
<p>
Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ExternalLineageService { + /** + * Creates an external lineage relationship between a Databricks or external metadata object and + * another external metadata object. + */ + ExternalLineageRelationship createExternalLineageRelationship( + CreateExternalLineageRelationshipRequest createExternalLineageRelationshipRequest); + + /** + * Deletes an external lineage relationship between a Databricks or external metadata object and + * another external metadata object. + */ + void deleteExternalLineageRelationship( + DeleteExternalLineageRelationshipRequest deleteExternalLineageRelationshipRequest); + + /** + * Lists external lineage relationships of a Databricks object or external metadata given a + * supplied direction. + */ + ListExternalLineageRelationshipsResponse listExternalLineageRelationships( + ListExternalLineageRelationshipsRequest listExternalLineageRelationshipsRequest); + + /** + * Updates an external lineage relationship between a Databricks or external metadata object and + * another external metadata object. + */ + ExternalLineageRelationship updateExternalLineageRelationship( + UpdateExternalLineageRelationshipRequest updateExternalLineageRelationshipRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTable.java new file mode 100755 index 000000000..329ca5930 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTable.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ExternalLineageTable { + /** */ + @JsonProperty("name") + private String name; + + public ExternalLineageTable setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageTable that = (ExternalLineageTable) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageTable.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTableInfo.java new file mode 100755 index 000000000..8d31a384f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTableInfo.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Represents the table information in the lineage event. */ +@Generated +public class ExternalLineageTableInfo { + /** Name of Catalog. 
*/ + @JsonProperty("catalog_name") + private String catalogName; + + /** Timestamp of the lineage event. */ + @JsonProperty("event_time") + private String eventTime; + + /** Name of Table. */ + @JsonProperty("name") + private String name; + + /** Name of Schema. */ + @JsonProperty("schema_name") + private String schemaName; + + public ExternalLineageTableInfo setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public ExternalLineageTableInfo setEventTime(String eventTime) { + this.eventTime = eventTime; + return this; + } + + public String getEventTime() { + return eventTime; + } + + public ExternalLineageTableInfo setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExternalLineageTableInfo setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalLineageTableInfo that = (ExternalLineageTableInfo) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(eventTime, that.eventTime) + && Objects.equals(name, that.name) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, eventTime, name, schemaName); + } + + @Override + public String toString() { + return new ToStringer(ExternalLineageTableInfo.class) + .add("catalogName", catalogName) + .add("eventTime", eventTime) + .add("name", name) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java index b8a003e2b..0827a081c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java @@ -36,11 +36,11 @@ public class ExternalLocationInfo { @JsonProperty("credential_name") private String credentialName; - /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */ + /** Whether to enable file events on this external location. */ @JsonProperty("enable_file_events") private Boolean enableFileEvents; - /** Encryption options that apply to clients connecting to cloud storage. */ + /** */ @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; @@ -52,7 +52,7 @@ public class ExternalLocationInfo { @JsonProperty("fallback") private Boolean fallback; - /** [Create:OPT Update:OPT] File event queue settings. */ + /** File event queue settings. 
*/ @JsonProperty("file_event_queue") private FileEventQueue fileEventQueue; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java index 8c3107d8c..323cfed47 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteExternalLocationRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadata.java new file mode 100755 index 000000000..eb20f8f9a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadata.java @@ -0,0 +1,255 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class ExternalMetadata { + /** List of columns associated with the external metadata object. */ + @JsonProperty("columns") + private Collection columns; + + /** Time at which this external metadata object was created. */ + @JsonProperty("create_time") + private String createTime; + + /** Username of external metadata object creator. */ + @JsonProperty("created_by") + private String createdBy; + + /** User-provided free-form text description. */ + @JsonProperty("description") + private String description; + + /** Type of entity within the external system. */ + @JsonProperty("entity_type") + private String entityType; + + /** Unique identifier of the external metadata object. */ + @JsonProperty("id") + private String id; + + /** Unique identifier of parent metastore. */ + @JsonProperty("metastore_id") + private String metastoreId; + + /** Name of the external metadata object. */ + @JsonProperty("name") + private String name; + + /** Owner of the external metadata object. */ + @JsonProperty("owner") + private String owner; + + /** A map of key-value properties attached to the external metadata object. */ + @JsonProperty("properties") + private Map properties; + + /** Type of external system. */ + @JsonProperty("system_type") + private SystemType systemType; + + /** Time at which this external metadata object was last modified. */ + @JsonProperty("update_time") + private String updateTime; + + /** Username of user who last modified external metadata object. */ + @JsonProperty("updated_by") + private String updatedBy; + + /** URL associated with the external metadata object. 
*/ + @JsonProperty("url") + private String url; + + public ExternalMetadata setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + public ExternalMetadata setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public ExternalMetadata setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ExternalMetadata setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ExternalMetadata setEntityType(String entityType) { + this.entityType = entityType; + return this; + } + + public String getEntityType() { + return entityType; + } + + public ExternalMetadata setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ExternalMetadata setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public ExternalMetadata setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ExternalMetadata setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public ExternalMetadata setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public ExternalMetadata setSystemType(SystemType systemType) { + this.systemType = systemType; + return this; + } + + public SystemType getSystemType() { + return systemType; + } + + public ExternalMetadata setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + public ExternalMetadata setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public ExternalMetadata setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalMetadata that = (ExternalMetadata) o; + return Objects.equals(columns, that.columns) + && Objects.equals(createTime, that.createTime) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(description, that.description) + && Objects.equals(entityType, that.entityType) + && Objects.equals(id, that.id) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(properties, that.properties) + && Objects.equals(systemType, that.systemType) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(updatedBy, that.updatedBy) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash( + columns, + createTime, + createdBy, + description, + entityType, + id, + metastoreId, + name, + owner, + properties, + systemType, + updateTime, + updatedBy, + url); + } + + @Override + public String toString() { + return new ToStringer(ExternalMetadata.class) + .add("columns", columns) + .add("createTime", createTime) + 
.add("createdBy", createdBy) + .add("description", description) + .add("entityType", entityType) + .add("id", id) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("properties", properties) + .add("systemType", systemType) + .add("updateTime", updateTime) + .add("updatedBy", updatedBy) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java new file mode 100755 index 000000000..7de3eeb4a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java @@ -0,0 +1,115 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * External Metadata objects enable customers to register and manage metadata about external systems + * within Unity Catalog. + * + *
<p>
These APIs provide a standardized way to create, update, retrieve, list, and delete external + * metadata objects. Fine-grained authorization ensures that only users with appropriate permissions + * can view and manage external metadata objects. + */ +@Generated +public class ExternalMetadataAPI { + private static final Logger LOG = LoggerFactory.getLogger(ExternalMetadataAPI.class); + + private final ExternalMetadataService impl; + + /** Regular-use constructor */ + public ExternalMetadataAPI(ApiClient apiClient) { + impl = new ExternalMetadataImpl(apiClient); + } + + /** Constructor for mocks */ + public ExternalMetadataAPI(ExternalMetadataService mock) { + impl = mock; + } + + public ExternalMetadata createExternalMetadata(ExternalMetadata externalMetadata) { + return createExternalMetadata( + new CreateExternalMetadataRequest().setExternalMetadata(externalMetadata)); + } + + /** + * Creates a new external metadata object in the parent metastore if the caller is a metastore + * admin or has the **CREATE_EXTERNAL_METADATA** privilege. Grants **BROWSE** to all account users + * upon creation by default. + */ + public ExternalMetadata createExternalMetadata(CreateExternalMetadataRequest request) { + return impl.createExternalMetadata(request); + } + + public void deleteExternalMetadata(String name) { + deleteExternalMetadata(new DeleteExternalMetadataRequest().setName(name)); + } + + /** + * Deletes the external metadata object that matches the supplied name. The caller must be a + * metastore admin, the owner of the external metadata object, or a user that has the **MANAGE** + * privilege. + */ + public void deleteExternalMetadata(DeleteExternalMetadataRequest request) { + impl.deleteExternalMetadata(request); + } + + public ExternalMetadata getExternalMetadata(String name) { + return getExternalMetadata(new GetExternalMetadataRequest().setName(name)); + } + + /** + * Gets the specified external metadata object in a metastore. The caller must be a metastore + * admin, the owner of the external metadata object, or a user that has the **BROWSE** privilege. + */ + public ExternalMetadata getExternalMetadata(GetExternalMetadataRequest request) { + return impl.getExternalMetadata(request); + } + + /** + * Gets an array of external metadata objects in the metastore. If the caller is the metastore + * admin, all external metadata objects will be retrieved. Otherwise, only external metadata + * objects that the caller has **BROWSE** on will be retrieved. There is no guarantee of a + * specific ordering of the elements in the array. + */ + public Iterable listExternalMetadata(ListExternalMetadataRequest request) { + return new Paginator<>( + request, + impl::listExternalMetadata, + ListExternalMetadataResponse::getExternalMetadata, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public ExternalMetadata updateExternalMetadata( + String name, ExternalMetadata externalMetadata, String updateMask) { + return updateExternalMetadata( + new UpdateExternalMetadataRequest() + .setName(name) + .setExternalMetadata(externalMetadata) + .setUpdateMask(updateMask)); + } + + /** + * Updates the external metadata object that matches the supplied name. The caller can only update + * either the owner or other metadata fields in one request. The caller must be a metastore admin, + * the owner of the external metadata object, or a user that has the **MODIFY** privilege. 
If the + * caller is updating the owner, they must also have the **MANAGE** privilege. + */ + public ExternalMetadata updateExternalMetadata(UpdateExternalMetadataRequest request) { + return impl.updateExternalMetadata(request); + } + + public ExternalMetadataService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataImpl.java new file mode 100755 index 000000000..b3047ae59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataImpl.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of ExternalMetadata */ +@Generated +class ExternalMetadataImpl implements ExternalMetadataService { + private final ApiClient apiClient; + + public ExternalMetadataImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public ExternalMetadata createExternalMetadata(CreateExternalMetadataRequest request) { + String path = "/api/2.0/lineage-tracking/external-metadata"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getExternalMetadata())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ExternalMetadata.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteExternalMetadata(DeleteExternalMetadataRequest request) { + String path = + String.format("/api/2.0/lineage-tracking/external-metadata/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ExternalMetadata getExternalMetadata(GetExternalMetadataRequest request) { + String path = + String.format("/api/2.0/lineage-tracking/external-metadata/%s", request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ExternalMetadata.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListExternalMetadataResponse listExternalMetadata(ListExternalMetadataRequest request) { + String path = "/api/2.0/lineage-tracking/external-metadata"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListExternalMetadataResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ExternalMetadata updateExternalMetadata(UpdateExternalMetadataRequest request) { + String path = + String.format("/api/2.0/lineage-tracking/external-metadata/%s", request.getName()); + try { + Request req = new 
Request("PATCH", path, apiClient.serialize(request.getExternalMetadata())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ExternalMetadata.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataService.java new file mode 100755 index 000000000..40bb19ad4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataService.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * External Metadata objects enable customers to register and manage metadata about external systems + * within Unity Catalog. + * + *
<p>
These APIs provide a standardized way to create, update, retrieve, list, and delete external + * metadata objects. Fine-grained authorization ensures that only users with appropriate permissions + * can view and manage external metadata objects. + * + *
<p>
This is the high-level interface that contains generated methods. + * + *
<p>
Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ExternalMetadataService { + /** + * Creates a new external metadata object in the parent metastore if the caller is a metastore + * admin or has the **CREATE_EXTERNAL_METADATA** privilege. Grants **BROWSE** to all account users + * upon creation by default. + */ + ExternalMetadata createExternalMetadata( + CreateExternalMetadataRequest createExternalMetadataRequest); + + /** + * Deletes the external metadata object that matches the supplied name. The caller must be a + * metastore admin, the owner of the external metadata object, or a user that has the **MANAGE** + * privilege. + */ + void deleteExternalMetadata(DeleteExternalMetadataRequest deleteExternalMetadataRequest); + + /** + * Gets the specified external metadata object in a metastore. The caller must be a metastore + * admin, the owner of the external metadata object, or a user that has the **BROWSE** privilege. + */ + ExternalMetadata getExternalMetadata(GetExternalMetadataRequest getExternalMetadataRequest); + + /** + * Gets an array of external metadata objects in the metastore. If the caller is the metastore + * admin, all external metadata objects will be retrieved. Otherwise, only external metadata + * objects that the caller has **BROWSE** on will be retrieved. There is no guarantee of a + * specific ordering of the elements in the array. + */ + ListExternalMetadataResponse listExternalMetadata( + ListExternalMetadataRequest listExternalMetadataRequest); + + /** + * Updates the external metadata object that matches the supplied name. The caller can only update + * either the owner or other metadata fields in one request. The caller must be a metastore admin, + * the owner of the external metadata object, or a user that has the **MODIFY** privilege. If the + * caller is updating the owner, they must also have the **MANAGE** privilege. + */ + ExternalMetadata updateExternalMetadata( + UpdateExternalMetadataRequest updateExternalMetadataRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java index 794ac8243..7e41e1dc0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java @@ -21,11 +21,11 @@ public class FunctionParameterInfo { @JsonProperty("parameter_default") private String parameterDefault; - /** The mode of the function parameter. */ + /** */ @JsonProperty("parameter_mode") private FunctionParameterMode parameterMode; - /** The type of function parameter. 
*/ + /** */ @JsonProperty("parameter_type") private FunctionParameterType parameterType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java index 422449786..387db0b64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteFunctionRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java index b834a9ff0..226e0f45c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java @@ -15,7 +15,7 @@ public class GcpPubsub { /** * The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription - * name} REQUIRED for provided_pubsub. + * name} Required for provided_pubsub. */ @JsonProperty("subscription_name") private String subscriptionName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java index f768675fe..3a67980a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java @@ -9,7 +9,7 @@ @Generated public class GenerateTemporaryServiceCredentialRequest { - /** The Azure cloud options to customize the requested temporary credential */ + /** */ @JsonProperty("azure_options") private GenerateTemporaryServiceCredentialAzureOptions azureOptions; @@ -17,7 +17,7 @@ public class GenerateTemporaryServiceCredentialRequest { @JsonProperty("credential_name") private String credentialName; - /** The GCP cloud options to customize the requested temporary credential */ + /** */ @JsonProperty("gcp_options") private GenerateTemporaryServiceCredentialGcpOptions gcpOptions; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java index be752eec7..3f1f2cc48 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java @@ -9,25 +9,15 @@ @Generated public class GenerateTemporaryTableCredentialResponse { - /** - * AWS temporary credentials for API authentication. Read more at - * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html. 
- */ + /** */ @JsonProperty("aws_temp_credentials") private AwsCredentials awsTempCredentials; - /** - * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or - * Managed Identity. Read more at - * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token - */ + /** */ @JsonProperty("azure_aad") private AzureActiveDirectoryToken azureAad; - /** - * Azure temporary credentials for API authentication. Read more at - * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas - */ + /** */ @JsonProperty("azure_user_delegation_sas") private AzureUserDelegationSas azureUserDelegationSas; @@ -38,17 +28,11 @@ public class GenerateTemporaryTableCredentialResponse { @JsonProperty("expiration_time") private Long expirationTime; - /** - * GCP temporary credentials for API authentication. Read more at - * https://developers.google.com/identity/protocols/oauth2/service-account - */ + /** */ @JsonProperty("gcp_oauth_token") private GcpOauthToken gcpOauthToken; - /** - * R2 temporary credentials for API authentication. Read more at - * https://developers.cloudflare.com/r2/api/s3/tokens/. - */ + /** */ @JsonProperty("r2_temp_credentials") private R2Credentials r2TempCredentials; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalMetadataRequest.java new file mode 100755 index 000000000..b797fe594 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalMetadataRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetExternalMetadataRequest { + /** */ + @JsonIgnore private String name; + + public GetExternalMetadataRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExternalMetadataRequest that = (GetExternalMetadataRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetExternalMetadataRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LineageDirection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LineageDirection.java new file mode 100755 index 000000000..cbd8f066d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LineageDirection.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
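// Illustrative usage sketch (hand-written, not part of the generated sources above).
// Shows how an external lineage relationship between an external file path (source)
// and a Unity Catalog table (target) might be created through the ExternalLineageService
// defined earlier in this diff. The setExternalLineageRelationship(...) setter on the
// request is assumed to exist alongside the getter used in ExternalLineageImpl; the
// bucket, table, and property values are placeholders.
import com.databricks.sdk.service.catalog.CreateExternalLineageRelationshipRequest;
import com.databricks.sdk.service.catalog.ExternalLineageObject;
import com.databricks.sdk.service.catalog.ExternalLineagePath;
import com.databricks.sdk.service.catalog.ExternalLineageRelationship;
import com.databricks.sdk.service.catalog.ExternalLineageService;
import com.databricks.sdk.service.catalog.ExternalLineageTable;
import java.util.Collections;

public class CreateExternalLineageExample {
  /** Links an external landing file (source) to a Unity Catalog table (target). */
  public static ExternalLineageRelationship linkFileToTable(ExternalLineageService externalLineage) {
    ExternalLineageObject source =
        new ExternalLineageObject()
            .setPath(new ExternalLineagePath().setUrl("s3://my-bucket/landing/orders.csv"));

    ExternalLineageObject target =
        new ExternalLineageObject()
            .setTable(new ExternalLineageTable().setName("main.sales.orders"));

    ExternalLineageRelationship relationship =
        new ExternalLineageRelationship()
            .setSource(source)
            .setTarget(target)
            .setProperties(Collections.singletonMap("pipeline", "nightly-ingest"));

    // POST /api/2.0/lineage-tracking/external-lineage (see ExternalLineageImpl).
    // setExternalLineageRelationship(...) is assumed; only the getter appears in this diff.
    return externalLineage.createExternalLineageRelationship(
        new CreateExternalLineageRelationshipRequest().setExternalLineageRelationship(relationship));
  }
}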
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum LineageDirection { + DOWNSTREAM, + UPSTREAM, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsRequest.java new file mode 100755 index 000000000..9db004efc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsRequest.java @@ -0,0 +1,95 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListExternalLineageRelationshipsRequest { + /** The lineage direction to filter on. */ + @JsonIgnore + @QueryParam("lineage_direction") + private LineageDirection lineageDirection; + + /** The object to query external lineage relationship on. */ + @JsonIgnore + @QueryParam("object_info") + private ExternalLineageObject objectInfo; + + /** */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListExternalLineageRelationshipsRequest setLineageDirection( + LineageDirection lineageDirection) { + this.lineageDirection = lineageDirection; + return this; + } + + public LineageDirection getLineageDirection() { + return lineageDirection; + } + + public ListExternalLineageRelationshipsRequest setObjectInfo(ExternalLineageObject objectInfo) { + this.objectInfo = objectInfo; + return this; + } + + public ExternalLineageObject getObjectInfo() { + return objectInfo; + } + + public ListExternalLineageRelationshipsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListExternalLineageRelationshipsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExternalLineageRelationshipsRequest that = (ListExternalLineageRelationshipsRequest) o; + return Objects.equals(lineageDirection, that.lineageDirection) + && Objects.equals(objectInfo, that.objectInfo) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(lineageDirection, objectInfo, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExternalLineageRelationshipsRequest.class) + .add("lineageDirection", lineageDirection) + .add("objectInfo", objectInfo) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsResponse.java new file mode 100755 index 000000000..7bf328c52 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListExternalLineageRelationshipsResponse { + /** */ + @JsonProperty("external_lineage_relationships") + private Collection externalLineageRelationships; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExternalLineageRelationshipsResponse setExternalLineageRelationships( + Collection externalLineageRelationships) { + this.externalLineageRelationships = externalLineageRelationships; + return this; + } + + public Collection getExternalLineageRelationships() { + return externalLineageRelationships; + } + + public ListExternalLineageRelationshipsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExternalLineageRelationshipsResponse that = (ListExternalLineageRelationshipsResponse) o; + return Objects.equals(externalLineageRelationships, that.externalLineageRelationships) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(externalLineageRelationships, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExternalLineageRelationshipsResponse.class) + .add("externalLineageRelationships", externalLineageRelationships) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataRequest.java new file mode 100755 index 000000000..8e530d320 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
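// Illustrative usage sketch (hand-written, not part of the generated sources above).
// Pages through the external lineage relationships downstream of a Unity Catalog table
// by following next_page_token, using the request/response classes defined above. The
// element type of getExternalLineageRelationships() is not spelled out in this diff, so
// items are handled as Object here; the table name is a placeholder.
import com.databricks.sdk.service.catalog.ExternalLineageObject;
import com.databricks.sdk.service.catalog.ExternalLineageService;
import com.databricks.sdk.service.catalog.ExternalLineageTable;
import com.databricks.sdk.service.catalog.LineageDirection;
import com.databricks.sdk.service.catalog.ListExternalLineageRelationshipsRequest;
import com.databricks.sdk.service.catalog.ListExternalLineageRelationshipsResponse;

public class ListExternalLineageExample {
  public static void printDownstreamLineage(ExternalLineageService externalLineage) {
    ListExternalLineageRelationshipsRequest request =
        new ListExternalLineageRelationshipsRequest()
            .setObjectInfo(
                new ExternalLineageObject()
                    .setTable(new ExternalLineageTable().setName("main.sales.orders")))
            .setLineageDirection(LineageDirection.DOWNSTREAM)
            .setPageSize(100L);

    String pageToken = null;
    do {
      request.setPageToken(pageToken);
      // GET /api/2.0/lineage-tracking/external-lineage (see ExternalLineageImpl).
      ListExternalLineageRelationshipsResponse response =
          externalLineage.listExternalLineageRelationships(request);
      if (response.getExternalLineageRelationships() != null) {
        for (Object relationship : response.getExternalLineageRelationships()) {
          System.out.println(relationship);
        }
      }
      pageToken = response.getNextPageToken();
    } while (pageToken != null && !pageToken.isEmpty());
  }
}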
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListExternalMetadataRequest { + /** */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListExternalMetadataRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListExternalMetadataRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExternalMetadataRequest that = (ListExternalMetadataRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExternalMetadataRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataResponse.java new file mode 100755 index 000000000..0a218a924 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListExternalMetadataResponse { + /** */ + @JsonProperty("external_metadata") + private Collection externalMetadata; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExternalMetadataResponse setExternalMetadata( + Collection externalMetadata) { + this.externalMetadata = externalMetadata; + return this; + } + + public Collection getExternalMetadata() { + return externalMetadata; + } + + public ListExternalMetadataResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExternalMetadataResponse that = (ListExternalMetadataResponse) o; + return Objects.equals(externalMetadata, that.externalMetadata) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(externalMetadata, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExternalMetadataResponse.class) + .add("externalMetadata", externalMetadata) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java index 8cb9ee53b..d3a7edbbb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java @@ -25,7 +25,7 @@ public void assign(CreateMetastoreAssignment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, AssignResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -64,7 +64,7 @@ public void delete(DeleteMetastoreRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -116,7 +116,7 @@ public void unassign(UnassignRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, UnassignResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -145,7 +145,7 @@ public void updateAssignment(UpdateMetastoreAssignment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateAssignmentResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java index b847105e0..db065c353 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java @@ -25,7 +25,7 @@ public void delete(DeleteModelVersionRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java index aac4fa412..f1c2bd00e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java @@ -91,7 +91,7 @@ public class MonitorInfo { @JsonProperty("snapshot") private MonitorSnapshot snapshot; - /** The status of the monitor. */ + /** */ @JsonProperty("status") private MonitorInfoStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatus.java index 11f64ecf9..96edc0f08 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableStatus.java @@ -10,10 +10,7 @@ /** Status of an online table. */ @Generated public class OnlineTableStatus { - /** - * Detailed status of an online table. Shown if the online table is in the - * ONLINE_CONTINUOUS_UPDATE or the ONLINE_UPDATING_PIPELINE_RESOURCES state. - */ + /** */ @JsonProperty("continuous_update_status") private ContinuousUpdateStatus continuousUpdateStatus; @@ -21,10 +18,7 @@ public class OnlineTableStatus { @JsonProperty("detailed_state") private OnlineTableState detailedState; - /** - * Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the - * ONLINE_PIPELINE_FAILED state. - */ + /** */ @JsonProperty("failed_status") private FailedStatus failedStatus; @@ -32,17 +26,11 @@ public class OnlineTableStatus { @JsonProperty("message") private String message; - /** - * Detailed status of an online table. Shown if the online table is in the - * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. - */ + /** */ @JsonProperty("provisioning_status") private ProvisioningStatus provisioningStatus; - /** - * Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE - * or the ONLINE_NO_PENDING_UPDATE state. 
- */ + /** */ @JsonProperty("triggered_update_status") private TriggeredUpdateStatus triggeredUpdateStatus; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java index 2121a24ce..c16b10697 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteOnlineTableRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java index 78553ca8e..cb4a6fbcf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java @@ -25,7 +25,7 @@ public void cancelRefresh(CancelRefreshRequest request) { try { Request req = new Request("POST", path); ApiClient.setQuery(req, request); - apiClient.execute(req, CancelRefreshResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -51,7 +51,7 @@ public void delete(DeleteQualityMonitorRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java index bc2de4af6..a87fc880b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteRegisteredModelRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -51,7 +51,7 @@ public void deleteAlias(DeleteAliasRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteAliasResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java index e5470c89b..891336ded 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java @@ 
-37,7 +37,7 @@ public void delete(DeleteSchemaRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java index d4bdc219e..fd3661c0a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java @@ -4,6 +4,7 @@ import com.databricks.sdk.support.Generated; +/** Latest kind: TABLE_DELTA_ICEBERG_DELTASHARING = 252; Next id:253 */ @Generated public enum SecurableKind { TABLE_DB_STORAGE, @@ -11,6 +12,7 @@ public enum SecurableKind { TABLE_DELTASHARING, TABLE_DELTASHARING_MUTABLE, TABLE_DELTA_EXTERNAL, + TABLE_DELTA_ICEBERG_DELTASHARING, TABLE_DELTA_ICEBERG_MANAGED, TABLE_DELTA_UNIFORM_HUDI_EXTERNAL, TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java index 450be090d..fff872001 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java @@ -11,10 +11,6 @@ /** Manifest of a specific securable kind. */ @Generated public class SecurableKindManifest { - /** A list of allowed option names, consistent with the 'options' field. */ - @JsonProperty("allowedOptions") - private Collection allowedOptions; - /** Privileges that can be assigned to the securable. */ @JsonProperty("assignable_privileges") private Collection assignablePrivileges; @@ -27,10 +23,6 @@ public class SecurableKindManifest { @JsonProperty("options") private Collection options; - /** A list of required option names, consistent with the 'options' field. */ - @JsonProperty("requiredOptions") - private Collection requiredOptions; - /** Securable kind to get manifest of. 
*/ @JsonProperty("securable_kind") private SecurableKind securableKind; @@ -39,15 +31,6 @@ public class SecurableKindManifest { @JsonProperty("securable_type") private SecurableType securableType; - public SecurableKindManifest setAllowedOptions(Collection allowedOptions) { - this.allowedOptions = allowedOptions; - return this; - } - - public Collection getAllowedOptions() { - return allowedOptions; - } - public SecurableKindManifest setAssignablePrivileges(Collection assignablePrivileges) { this.assignablePrivileges = assignablePrivileges; return this; @@ -75,15 +58,6 @@ public Collection getOptions() { return options; } - public SecurableKindManifest setRequiredOptions(Collection requiredOptions) { - this.requiredOptions = requiredOptions; - return this; - } - - public Collection getRequiredOptions() { - return requiredOptions; - } - public SecurableKindManifest setSecurableKind(SecurableKind securableKind) { this.securableKind = securableKind; return this; @@ -107,35 +81,24 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SecurableKindManifest that = (SecurableKindManifest) o; - return Objects.equals(allowedOptions, that.allowedOptions) - && Objects.equals(assignablePrivileges, that.assignablePrivileges) + return Objects.equals(assignablePrivileges, that.assignablePrivileges) && Objects.equals(capabilities, that.capabilities) && Objects.equals(options, that.options) - && Objects.equals(requiredOptions, that.requiredOptions) && Objects.equals(securableKind, that.securableKind) && Objects.equals(securableType, that.securableType); } @Override public int hashCode() { - return Objects.hash( - allowedOptions, - assignablePrivileges, - capabilities, - options, - requiredOptions, - securableKind, - securableType); + return Objects.hash(assignablePrivileges, capabilities, options, securableKind, securableType); } @Override public String toString() { return new ToStringer(SecurableKindManifest.class) - .add("allowedOptions", allowedOptions) .add("assignablePrivileges", assignablePrivileges) .add("capabilities", capabilities) .add("options", options) - .add("requiredOptions", requiredOptions) .add("securableKind", securableKind) .add("securableType", securableType) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java index 3f1390c9f..87ab2bff3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteStorageCredentialRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java index 4adce737b..a0a652cd8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java @@ -26,7 
+26,7 @@ public void disable(DisableRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DisableResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -43,7 +43,7 @@ public void enable(EnableRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, EnableResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java new file mode 100755 index 000000000..7def44e04 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java @@ -0,0 +1,30 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum SystemType { + AMAZON_REDSHIFT, + AZURE_SYNAPSE, + CONFLUENT, + GOOGLE_BIGQUERY, + KAFKA, + LOOKER, + MICROSOFT_FABRIC, + MICROSOFT_SQL_SERVER, + MONGODB, + MYSQL, + ORACLE, + OTHER, + POSTGRESQL, + POWER_BI, + SALESFORCE, + SAP, + SERVICENOW, + SNOWFLAKE, + TABLEAU, + TERADATA, + WORKDAY, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java index e026a9c3f..5566e2eaa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteTableConstraintRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java index 02fc9066e..182c3d0f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java @@ -46,7 +46,7 @@ public class TableInfo { @JsonProperty("data_access_configuration_id") private String dataAccessConfigurationId; - /** Data source format */ + /** */ @JsonProperty("data_source_format") private DataSourceFormat dataSourceFormat; @@ -69,7 +69,7 @@ public class TableInfo { @JsonProperty("enable_predictive_optimization") private EnablePredictiveOptimization enablePredictiveOptimization; - /** Encryption options that apply to clients connecting to cloud storage. 
*/ + /** */ @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java index 6f5b3304b..0c9058bbd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java @@ -23,7 +23,7 @@ public void delete(DeleteTableRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -89,7 +89,7 @@ public void update(UpdateTableRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java index 8083ee16b..a27bbd94c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java @@ -9,18 +9,11 @@ @Generated public class TemporaryCredentials { - /** - * AWS temporary credentials for API authentication. Read more at - * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html. - */ + /** */ @JsonProperty("aws_temp_credentials") private AwsCredentials awsTempCredentials; - /** - * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or - * Managed Identity. Read more at - * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token - */ + /** */ @JsonProperty("azure_aad") private AzureActiveDirectoryToken azureAad; @@ -31,10 +24,7 @@ public class TemporaryCredentials { @JsonProperty("expiration_time") private Long expirationTime; - /** - * GCP temporary credentials for API authentication. Read more at - * https://developers.google.com/identity/protocols/oauth2/service-account - */ + /** */ @JsonProperty("gcp_oauth_token") private GcpOauthToken gcpOauthToken; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java deleted file mode 100755 index 61a2a8cb3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UnassignResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UnassignResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java deleted file mode 100755 index 8deea1834..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateAssignmentResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateAssignmentResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLineageRelationshipRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLineageRelationshipRequest.java new file mode 100755 index 000000000..1b42084f5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLineageRelationshipRequest.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateExternalLineageRelationshipRequest { + /** */ + @JsonProperty("external_lineage_relationship") + private UpdateRequestExternalLineage externalLineageRelationship; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateExternalLineageRelationshipRequest setExternalLineageRelationship( + UpdateRequestExternalLineage externalLineageRelationship) { + this.externalLineageRelationship = externalLineageRelationship; + return this; + } + + public UpdateRequestExternalLineage getExternalLineageRelationship() { + return externalLineageRelationship; + } + + public UpdateExternalLineageRelationshipRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateExternalLineageRelationshipRequest that = (UpdateExternalLineageRelationshipRequest) o; + return Objects.equals(externalLineageRelationship, that.externalLineageRelationship) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(externalLineageRelationship, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateExternalLineageRelationshipRequest.class) + .add("externalLineageRelationship", externalLineageRelationship) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java index d2a759d9f..719c6c4d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java @@ -18,11 +18,11 @@ public class UpdateExternalLocation { @JsonProperty("credential_name") private String credentialName; - /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */ + /** Whether to enable file events on this external location. */ @JsonProperty("enable_file_events") private Boolean enableFileEvents; - /** Encryption options that apply to clients connecting to cloud storage. */ + /** */ @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; @@ -34,7 +34,7 @@ public class UpdateExternalLocation { @JsonProperty("fallback") private Boolean fallback; - /** [Create:OPT Update:OPT] File event queue settings. */ + /** File event queue settings. */ @JsonProperty("file_event_queue") private FileEventQueue fileEventQueue; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalMetadataRequest.java new file mode 100755 index 000000000..890161e0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalMetadataRequest.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
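The `update_mask` documented above is the standard partial-update convention: the request body carries the new values, and the mask names which fields the server should actually touch (`*` replaces everything). A minimal sketch, assuming the relationship's `properties` map is string-to-string as the generated field suggests; `UpdateRequestExternalLineage` is the payload type added further down in this diff, and the surrounding workspace-client call is omitted because it is not shown here:

```java
import com.databricks.sdk.service.catalog.UpdateExternalLineageRelationshipRequest;
import com.databricks.sdk.service.catalog.UpdateRequestExternalLineage;
import java.util.Collections;

public class UpdateMaskSketch {
  static UpdateExternalLineageRelationshipRequest patchProperties(String relationshipId) {
    UpdateRequestExternalLineage patch =
        new UpdateRequestExternalLineage()
            .setId(relationshipId)
            // Only fields named in the mask are modified server-side.
            .setProperties(Collections.singletonMap("owner", "data-platform"));
    return new UpdateExternalLineageRelationshipRequest()
        .setExternalLineageRelationship(patch)
        // Comma-separated field paths, no spaces; `*` would mean full replacement.
        .setUpdateMask("properties");
  }
}
```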
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateExternalMetadataRequest { + /** */ + @JsonProperty("external_metadata") + private ExternalMetadata externalMetadata; + + /** Name of the external metadata object. */ + @JsonIgnore private String name; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateExternalMetadataRequest setExternalMetadata(ExternalMetadata externalMetadata) { + this.externalMetadata = externalMetadata; + return this; + } + + public ExternalMetadata getExternalMetadata() { + return externalMetadata; + } + + public UpdateExternalMetadataRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateExternalMetadataRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateExternalMetadataRequest that = (UpdateExternalMetadataRequest) o; + return Objects.equals(externalMetadata, that.externalMetadata) + && Objects.equals(name, that.name) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(externalMetadata, name, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateExternalMetadataRequest.class) + .add("externalMetadata", externalMetadata) + .add("name", name) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java new file mode 100755 index 000000000..4ca0b7552 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class UpdateRequestExternalLineage { + /** List of column relationships between source and target objects. */ + @JsonProperty("columns") + private Collection columns; + + /** Unique identifier of the external lineage relationship. */ + @JsonProperty("id") + private String id; + + /** Key-value properties associated with the external lineage relationship. */ + @JsonProperty("properties") + private Map properties; + + /** Source object of the external lineage relationship. */ + @JsonProperty("source") + private ExternalLineageObject source; + + /** Target object of the external lineage relationship. 
*/ + @JsonProperty("target") + private ExternalLineageObject target; + + public UpdateRequestExternalLineage setColumns(Collection columns) { + this.columns = columns; + return this; + } + + public Collection getColumns() { + return columns; + } + + public UpdateRequestExternalLineage setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateRequestExternalLineage setProperties(Map properties) { + this.properties = properties; + return this; + } + + public Map getProperties() { + return properties; + } + + public UpdateRequestExternalLineage setSource(ExternalLineageObject source) { + this.source = source; + return this; + } + + public ExternalLineageObject getSource() { + return source; + } + + public UpdateRequestExternalLineage setTarget(ExternalLineageObject target) { + this.target = target; + return this; + } + + public ExternalLineageObject getTarget() { + return target; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateRequestExternalLineage that = (UpdateRequestExternalLineage) o; + return Objects.equals(columns, that.columns) + && Objects.equals(id, that.id) + && Objects.equals(properties, that.properties) + && Objects.equals(source, that.source) + && Objects.equals(target, that.target); + } + + @Override + public int hashCode() { + return Objects.hash(columns, id, properties, source, target); + } + + @Override + public String toString() { + return new ToStringer(UpdateRequestExternalLineage.class) + .add("columns", columns) + .add("id", id) + .add("properties", properties) + .add("source", source) + .add("target", target) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java deleted file mode 100755 index c8187417c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java index 7dea44dd8..30b2980a7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java @@ -10,11 +10,11 @@ /** Next ID: 17 */ @Generated public class ValidateCredentialRequest { - /** The AWS IAM role configuration */ + /** */ @JsonProperty("aws_iam_role") private AwsIamRole awsIamRole; - /** The Azure managed identity configuration. 
*/ + /** */ @JsonProperty("azure_managed_identity") private AzureManagedIdentity azureManagedIdentity; @@ -22,7 +22,7 @@ public class ValidateCredentialRequest { @JsonProperty("credential_name") private String credentialName; - /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */ + /** */ @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccount databricksGcpServiceAccount; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java index ea18910d4..21ac9b83e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java @@ -36,7 +36,7 @@ public class VolumeInfo { @JsonProperty("created_by") private String createdBy; - /** Encryption options that apply to clients connecting to cloud storage. */ + /** */ @JsonProperty("encryption_details") private EncryptionDetails encryptionDetails; @@ -76,13 +76,7 @@ public class VolumeInfo { @JsonProperty("volume_id") private String volumeId; - /** - * The type of the volume. An external volume is located in the specified external location. A - * managed volume is located in the default location which is specified by the parent schema, or - * the parent catalog, or the Metastore. [Learn more] - * - *

[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external - */ + /** */ @JsonProperty("volume_type") private VolumeType volumeType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java index 00fe08a18..9462102a8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteVolumeRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java index 34d8b7b59..d2d695089 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java @@ -40,7 +40,7 @@ public void delete(DeleteCleanRoomAssetRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteCleanRoomAssetResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java index afb1ee357..46f713c85 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java @@ -30,9 +30,7 @@ public class CleanRoomRemoteDetail { @JsonProperty("collaborators") private Collection collaborators; - /** - * The compliance security profile used to process regulated data following compliance standards. - */ + /** */ @JsonProperty("compliance_security_profile") private ComplianceSecurityProfile complianceSecurityProfile; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java index 69469fc0c..aca6d9bde 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java @@ -10,7 +10,7 @@ /** * A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting * environment where multiple parties can work together on sensitive enterprise data without direct - * access to each other’s data. + * access to each other's data. 
*/ @Generated public class CleanRoomsAPI { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java index b0bacf5d0..d1dfc93b6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java @@ -53,7 +53,7 @@ public void delete(DeleteCleanRoomRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java index c3306e17d..56c3b7fb3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java @@ -6,7 +6,7 @@ /** * A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting * environment where multiple parties can work together on sensitive enterprise data without direct - * access to each other’s data. + * access to each other's data. * *

This is the high-level interface, that contains generated methods. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java index 5cc4c4842..6c0f39d60 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java @@ -10,7 +10,7 @@ @Generated public class CreateCleanRoomAssetRequest { - /** Metadata of the clean room asset */ + /** */ @JsonProperty("asset") private CleanRoomAsset asset; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java deleted file mode 100755 index 4efe5848d..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java +++ /dev/null @@ -1,32 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.cleanrooms; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -/** - * Response for delete clean room request. Using an empty message since the generic Empty proto does - * not externd UnshadedMessageMarker. - */ -@Generated -public class DeleteCleanRoomAssetResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteCleanRoomAssetResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java index bda1f2843..f5c2d54fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java @@ -10,7 +10,10 @@ @Generated public class UpdateCleanRoomAssetRequest { - /** Metadata of the clean room asset */ + /** + * The asset to update. The asset's `name` and `asset_type` fields are used to identify the asset + * to update. + */ @JsonProperty("asset") private CleanRoomAsset asset; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java deleted file mode 100755 index 2c169a9d7..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
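The reworded `asset` comment above means an update carries no separate identifier: the `name` and `asset_type` on the `CleanRoomAsset` payload itself select the asset being updated. A rough sketch of building such a payload; `CleanRoomAsset` is not part of this diff, so the fluent `setName`/`setAssetType` setters and the `CleanRoomAssetAssetType.ASSET_TYPE_TABLE` constant below are assumed from the SDK's generated pattern and should be checked against the released classes:

```java
import com.databricks.sdk.service.cleanrooms.CleanRoomAsset;
import com.databricks.sdk.service.cleanrooms.CleanRoomAssetAssetType;

public class UpdateCleanRoomAssetSketch {
  // Assumed generated setters and enum constant; verify against the actual SDK release.
  static CleanRoomAsset assetToUpdate() {
    return new CleanRoomAsset()
        .setName("creator.sales.customers") // identifies the asset...
        .setAssetType(CleanRoomAssetAssetType.ASSET_TYPE_TABLE); // ...together with its type
  }
}
```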
- -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class AddResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(AddResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java index 825127bce..377cbeea7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java @@ -10,12 +10,7 @@ /** Attributes set during cluster creation which are related to Amazon Web Services. */ @Generated public class AwsAttributes { - /** - * Availability type used for all subsequent nodes past the `first_on_demand` ones. - * - *

Note: If `first_on_demand` is zero, this availability type will be used for the entire - * cluster. - */ + /** */ @JsonProperty("availability") private AwsAvailability availability; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java deleted file mode 100755 index 2109537fd..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class CancelResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(CancelResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java deleted file mode 100755 index 36b86fe5f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class ChangeClusterOwnerResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(ChangeClusterOwnerResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java index c16994224..a123970d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java @@ -13,7 +13,7 @@ public class ClusterAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ClusterPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java index 17316f8c5..6cc04ec02 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java @@ -68,33 +68,7 @@ public class ClusterAttributes { @JsonProperty("custom_tags") private Map customTags; - /** - * Data security mode decides what data governance model to use 
when accessing data from a - * cluster. - * - *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * - * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on - * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * - * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - * - *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - * multiple users sharing the cluster. Data governance features are not available in this mode. * - * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - * `single_user_name`. Most programming languages, cluster features and data governance features - * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - * users. Cluster users are fully isolated so that they cannot see each other's data and - * credentials. Most data governance features are supported in this mode. But programming - * languages and cluster features might be limited. - * - *

The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed - * for future Databricks Runtime versions: - * - *

* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way - * that doesn’t have UC nor passthrough enabled. - */ + /** */ @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; @@ -160,21 +134,7 @@ public class ClusterAttributes { @JsonProperty("is_single_node") private Boolean isSingleNode; - /** - * The kind of compute described by this compute specification. - * - *

Depending on `kind`, different validations and default values will be applied. - * - *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with - * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - * - *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - * - *

[simple form]: https://docs.databricks.com/compute/simple-form.html - */ + /** */ @JsonProperty("kind") private Kind kind; @@ -269,7 +229,7 @@ public class ClusterAttributes { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** Cluster Attributes showing for clusters workload types. */ + /** */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java index 671640778..b9901dced 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java @@ -109,33 +109,7 @@ public class ClusterDetails { @JsonProperty("custom_tags") private Map customTags; - /** - * Data security mode decides what data governance model to use when accessing data from a - * cluster. - * - *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * - * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on - * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * - * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - * - *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - * multiple users sharing the cluster. Data governance features are not available in this mode. * - * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - * `single_user_name`. Most programming languages, cluster features and data governance features - * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - * users. Cluster users are fully isolated so that they cannot see each other's data and - * credentials. Most data governance features are supported in this mode. But programming - * languages and cluster features might be limited. - * - *

The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed - * for future Databricks Runtime versions: - * - *

* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way - * that doesn’t have UC nor passthrough enabled. - */ + /** */ @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; @@ -235,21 +209,7 @@ public class ClusterDetails { @JsonProperty("jdbc_port") private Long jdbcPort; - /** - * The kind of compute described by this compute specification. - * - *

Depending on `kind`, different validations and default values will be applied. - * - *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with - * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - * - *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - * - *

[simple form]: https://docs.databricks.com/compute/simple-form.html - */ + /** */ @JsonProperty("kind") private Kind kind; @@ -409,7 +369,7 @@ public class ClusterDetails { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** Cluster Attributes showing for clusters workload types. */ + /** */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermission.java index a96fd99fb..ac0c8d30d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermission.java @@ -18,7 +18,7 @@ public class ClusterPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ClusterPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescription.java index 33db8d0a8..1989ad279 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermissionsDescription.java @@ -13,7 +13,7 @@ public class ClusterPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ClusterPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java index ec3d88c43..f70a7831d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java @@ -38,7 +38,7 @@ public void delete(DeletePolicy request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeletePolicyResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -52,7 +52,7 @@ public void edit(EditPolicy request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, EditPolicyResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java index 651b6e1bb..672e5271e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java @@ -13,7 +13,7 @@ public class ClusterPolicyAccessControlRequest { @JsonProperty("group_name") private String 
groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ClusterPolicyPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermission.java index fa2715b7f..985913efe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermission.java @@ -18,7 +18,7 @@ public class ClusterPolicyPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ClusterPolicyPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescription.java index 169d11d86..56e831a9d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyPermissionsDescription.java @@ -13,7 +13,7 @@ public class ClusterPolicyPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ClusterPolicyPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java index f93b893bc..ab8cafe29 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java @@ -82,33 +82,7 @@ public class ClusterSpec { @JsonProperty("custom_tags") private Map customTags; - /** - * Data security mode decides what data governance model to use when accessing data from a - * cluster. - * - *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * - * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on - * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * - * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - * - *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - * multiple users sharing the cluster. Data governance features are not available in this mode. * - * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - * `single_user_name`. Most programming languages, cluster features and data governance features - * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - * users. Cluster users are fully isolated so that they cannot see each other's data and - * credentials. Most data governance features are supported in this mode. But programming - * languages and cluster features might be limited. - * - *

The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed - * for future Databricks Runtime versions: - * - *

* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way - * that doesn’t have UC nor passthrough enabled. - */ + /** */ @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; @@ -174,21 +148,7 @@ public class ClusterSpec { @JsonProperty("is_single_node") private Boolean isSingleNode; - /** - * The kind of compute described by this compute specification. - * - *

Depending on `kind`, different validations and default values will be applied. - * - *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with - * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - * - *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - * - *

[simple form]: https://docs.databricks.com/compute/simple-form.html - */ + /** */ @JsonProperty("kind") private Kind kind; @@ -296,7 +256,7 @@ public class ClusterSpec { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** Cluster Attributes showing for clusters workload types. */ + /** */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java index e847a4571..23c02c8d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java @@ -24,7 +24,7 @@ public void changeOwner(ChangeClusterOwner request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ChangeClusterOwnerResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -52,7 +52,7 @@ public void delete(DeleteCluster request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -66,7 +66,7 @@ public void edit(EditCluster request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, EditClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -172,7 +172,7 @@ public void permanentDelete(PermanentDeleteCluster request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PermanentDeleteClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -186,7 +186,7 @@ public void pin(PinCluster request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PinClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -200,7 +200,7 @@ public void resize(ResizeCluster request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ResizeClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -214,7 +214,7 @@ public void restart(RestartCluster request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, RestartClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -254,7 +254,7 @@ public void start(StartCluster request) { 
ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, StartClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -268,7 +268,7 @@ public void unpin(UnpinCluster request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UnpinClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -282,7 +282,7 @@ public void update(UpdateCluster request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateClusterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java index 3cd62c5cb..550dfe69a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java @@ -24,7 +24,7 @@ public void cancel(CancelCommand request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, CancelResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -78,7 +78,7 @@ public void destroy(DestroyContext request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DestroyResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java index b860190d0..eb15d7964 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java @@ -85,33 +85,7 @@ public class CreateCluster { @JsonProperty("custom_tags") private Map customTags; - /** - * Data security mode decides what data governance model to use when accessing data from a - * cluster. - * - *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * - * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on - * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * - * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - * - *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - * multiple users sharing the cluster. Data governance features are not available in this mode. * - * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - * `single_user_name`. Most programming languages, cluster features and data governance features - * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - * users. Cluster users are fully isolated so that they cannot see each other's data and - * credentials. Most data governance features are supported in this mode. But programming - * languages and cluster features might be limited. - * - *

The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed - * for future Databricks Runtime versions: - * - *

* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way - * that doesn’t have UC nor passthrough enabled. - */ + /** */ @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; @@ -177,21 +151,7 @@ public class CreateCluster { @JsonProperty("is_single_node") private Boolean isSingleNode; - /** - * The kind of compute described by this compute specification. - * - *

Depending on `kind`, different validations and default values will be applied. - * - *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with - * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - * - *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - * - *

[simple form]: https://docs.databricks.com/compute/simple-form.html - */ + /** */ @JsonProperty("kind") private Kind kind; @@ -299,7 +259,7 @@ public class CreateCluster { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** Cluster Attributes showing for clusters workload types. */ + /** */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java deleted file mode 100755 index 91fc276a6..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java deleted file mode 100755 index 3a71cf38e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteInstancePoolResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteInstancePoolResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java deleted file mode 100755 index 60aa4fcc7..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeletePolicyResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeletePolicyResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java deleted file mode 100755 index 62648157e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java deleted file mode 100755 index 2bf691176..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DestroyResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DestroyResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java index 3e04994c6..b3ca16e07 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java @@ -10,17 +10,11 @@ /** Describes the disk type. */ @Generated public class DiskType { - /** - * All Azure Disk types that Databricks supports. See - * https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks - */ + /** */ @JsonProperty("azure_disk_volume_type") private DiskTypeAzureDiskVolumeType azureDiskVolumeType; - /** - * All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for - * details. 
- */ + /** */ @JsonProperty("ebs_volume_type") private DiskTypeEbsVolumeType ebsVolumeType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index 7fe108450..f1b840e3a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -82,33 +82,7 @@ public class EditCluster { @JsonProperty("custom_tags") private Map customTags; - /** - * Data security mode decides what data governance model to use when accessing data from a - * cluster. - * - *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * - * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on - * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * - * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - * - *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - * multiple users sharing the cluster. Data governance features are not available in this mode. * - * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - * `single_user_name`. Most programming languages, cluster features and data governance features - * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - * users. Cluster users are fully isolated so that they cannot see each other's data and - * credentials. Most data governance features are supported in this mode. But programming - * languages and cluster features might be limited. - * - *

The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed - * for future Databricks Runtime versions: - * - *

* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way - * that doesn’t have UC nor passthrough enabled. - */ + /** */ @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; @@ -174,21 +148,7 @@ public class EditCluster { @JsonProperty("is_single_node") private Boolean isSingleNode; - /** - * The kind of compute described by this compute specification. - * - *

Depending on `kind`, different validations and default values will be applied. - * - *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with - * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - * - *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - * - *

[simple form]: https://docs.databricks.com/compute/simple-form.html - */ + /** */ @JsonProperty("kind") private Kind kind; @@ -296,7 +256,7 @@ public class EditCluster { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** Cluster Attributes showing for clusters workload types. */ + /** */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java deleted file mode 100755 index fe1297b0a..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class EditClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(EditClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java deleted file mode 100755 index a2c4bbdd3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class EditInstancePoolResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(EditInstancePoolResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java deleted file mode 100755 index e2931443b..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class EditPolicyResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(EditPolicyResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java deleted file mode 100755 index 35a7dba24..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class EditResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(EditResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java index 94689ee30..e038a880e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteGlobalInitScriptRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -76,7 +76,7 @@ public void update(GlobalInitScriptUpdateRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java deleted file mode 100755 index 58d55bb76..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class InstallLibrariesResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(InstallLibrariesResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java index 94ea72be1..5883b80e6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java @@ -13,7 +13,7 @@ public class InstancePoolAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private InstancePoolPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java index a97e496ad..774dcaf5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java @@ -10,10 +10,7 @@ /** Attributes set during instance pool creation which are related to GCP. */ @Generated public class InstancePoolGcpAttributes { - /** - * This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or - * preemptible VMs with a fallback to on-demand VMs if the former is unavailable. 
- */ + /** */ @JsonProperty("gcp_availability") private GcpAvailability gcpAvailability; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java index 7d00ebac0..e8aade399 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java @@ -18,7 +18,7 @@ public class InstancePoolPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private InstancePoolPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescription.java index 9b2ede0ca..46385e5c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermissionsDescription.java @@ -13,7 +13,7 @@ public class InstancePoolPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private InstancePoolPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java index b80dd7710..42514c126 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java @@ -38,7 +38,7 @@ public void delete(DeleteInstancePool request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteInstancePoolResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -52,7 +52,7 @@ public void edit(EditInstancePool request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, EditInstancePoolResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java index de759151d..08257cfc1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java @@ -24,7 +24,7 @@ public void add(AddInstanceProfile request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, AddResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } 
@@ -38,7 +38,7 @@ public void edit(InstanceProfile request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, EditResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -64,7 +64,7 @@ public void remove(RemoveInstanceProfile request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, RemoveResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java index 2b8b647f2..6bb0dd63e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java @@ -49,7 +49,7 @@ public void install(InstallLibraries request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, InstallLibrariesResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -63,7 +63,7 @@ public void uninstall(UninstallLibraries request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UninstallLibrariesResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java deleted file mode 100755 index 4a82a0542..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class PermanentDeleteClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(PermanentDeleteClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java deleted file mode 100755 index ad4b7f691..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class PinClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(PinClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java deleted file mode 100755 index 7c8b2e615..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class RemoveResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(RemoveResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java deleted file mode 100755 index a14e93d60..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class ResizeClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(ResizeClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java deleted file mode 100755 index a1dac51ac..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class RestartClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(RestartClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterResponse.java deleted file mode 100755 index ccdb37a29..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class StartClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(StartClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java index 74a38a68c..00e8118e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java @@ -148,6 +148,7 @@ public enum TerminationReasonCode { SECRET_CREATION_FAILURE, SECRET_PERMISSION_DENIED, SECRET_RESOLUTION_ERROR, + SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION, SECURITY_DAEMON_REGISTRATION_EXCEPTION, SELF_BOOTSTRAP_FAILURE, SERVERLESS_LONG_RUNNING_TERMINATED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java deleted file mode 100755 index 33d5b7d3c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UninstallLibrariesResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UninstallLibrariesResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java deleted file mode 100755 index 74a883deb..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UnpinClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UnpinClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java index 9471ef127..79e3b453d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java @@ -71,33 +71,7 @@ public class UpdateClusterResource { @JsonProperty("custom_tags") private Map customTags; - /** - * Data security mode decides what data governance model to use when accessing data from a - * cluster. - * - *

The following modes can only be used when `kind = CLASSIC_PREVIEW`. * - * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on - * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * - * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - * - *

The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - * multiple users sharing the cluster. Data governance features are not available in this mode. * - * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - * `single_user_name`. Most programming languages, cluster features and data governance features - * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - * users. Cluster users are fully isolated so that they cannot see each other's data and - * credentials. Most data governance features are supported in this mode. But programming - * languages and cluster features might be limited. - * - *

The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed - * for future Databricks Runtime versions: - * - *

* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way - * that doesn’t have UC nor passthrough enabled. - */ + /** */ @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; @@ -163,21 +137,7 @@ public class UpdateClusterResource { @JsonProperty("is_single_node") private Boolean isSingleNode; - /** - * The kind of compute described by this compute specification. - * - *

Depending on `kind`, different validations and default values will be applied. - * - *

Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with - * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - * - *

By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - * - *

[simple form]: https://docs.databricks.com/compute/simple-form.html - */ + /** */ @JsonProperty("kind") private Kind kind; @@ -285,7 +245,7 @@ public class UpdateClusterResource { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** Cluster Attributes showing for clusters workload types. */ + /** */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java deleted file mode 100755 index 9647b99d1..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateClusterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateClusterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java deleted file mode 100755 index 62feccc35..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java index 33e0c8889..60c0a7fc0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java @@ -13,7 +13,7 @@ public class CreateScheduleRequest { /** UUID identifying the dashboard to which the schedule belongs. */ @JsonIgnore private String dashboardId; - /** */ + /** The schedule to create. A dashboard is limited to 10 schedules. 
*/ @JsonProperty("schedule") private Schedule schedule; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java index 815f3fc8e..59d03eee8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java @@ -16,7 +16,7 @@ public class CreateSubscriptionRequest { /** UUID identifying the schedule to which the subscription belongs. */ @JsonIgnore private String scheduleId; - /** */ + /** The subscription to create. A schedule is limited to 100 subscriptions. */ @JsonProperty("subscription") private Subscription subscription; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index c42dec495..2548416fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -280,7 +280,7 @@ public void trashSpace(String spaceId) { trashSpace(new GenieTrashSpaceRequest().setSpaceId(spaceId)); } - /** Trash a Genie Space. */ + /** Move a Genie Space to the trash. */ public void trashSpace(GenieTrashSpaceRequest request) { impl.trashSpace(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index feed6dd92..d055781dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -43,7 +43,7 @@ public void deleteConversation(GenieDeleteConversationRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteConversationResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -221,7 +221,7 @@ public void trashSpace(GenieTrashSpaceRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, TrashSpaceResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java index 67a45a2b9..7be6e4bea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java @@ -20,7 +20,7 @@ public class GenieListConversationsRequest { @QueryParam("page_token") private String pageToken; - /** The ID associated with the Genie space to list conversations from. */ + /** The ID of the Genie space to retrieve conversations from. 
*/ @JsonIgnore private String spaceId; public GenieListConversationsRequest setPageSize(Long pageSize) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java index db243673c..be97d6d5c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java @@ -53,22 +53,7 @@ public class GenieMessage { @JsonProperty("space_id") private String spaceId; - /** - * MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data - * sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * - * `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`: - * Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing - * a generated SQL query. Get the SQL query result by calling - * [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. * - * `FAILED`: The response generation or query execution failed. See `error` field. * `COMPLETED`: - * Message processing is completed. Results are in the `attachments` field. Get the SQL query - * result by calling - * [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. * - * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available - * anymore. The user needs to rerun the query. Rerun the SQL query result by calling - * [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * - * `CANCELLED`: Message has been cancelled. - */ + /** */ @JsonProperty("status") private MessageStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index 02368b9cd..f0ac59161 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -74,6 +74,6 @@ GenieListConversationsResponse listConversations( GenieStartConversationResponse startConversation( GenieStartConversationMessageRequest genieStartConversationMessageRequest); - /** Trash a Genie Space. */ + /** Move a Genie Space to the trash. */ void trashSpace(GenieTrashSpaceRequest genieTrashSpaceRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java index 5b27effad..d23cc055c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java @@ -9,7 +9,7 @@ @Generated public class GenieTrashSpaceRequest { - /** The ID associated with the Genie space to be trashed. */ + /** The ID associated with the Genie space to be sent to the trash. 
*/ @JsonIgnore private String spaceId; public GenieTrashSpaceRequest setSpaceId(String spaceId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java index ac931a0e7..3b744a103 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java @@ -72,7 +72,7 @@ public void deleteSchedule(DeleteScheduleRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteScheduleResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -88,7 +88,7 @@ public void deleteSubscription(DeleteSubscriptionRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteSubscriptionResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -232,7 +232,7 @@ public void trash(TrashDashboardRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, TrashDashboardResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -246,7 +246,7 @@ public void unpublish(UnpublishDashboardRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, UnpublishDashboardResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java deleted file mode 100755 index 0f43a3a49..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class TrashDashboardResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(TrashDashboardResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java deleted file mode 100755 index 211e9c010..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UnpublishDashboardResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UnpublishDashboardResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java index f3faa860d..b581278a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java @@ -13,7 +13,7 @@ public class UpdateScheduleRequest { /** UUID identifying the dashboard to which the schedule belongs. */ @JsonIgnore private String dashboardId; - /** */ + /** The schedule to update. */ @JsonProperty("schedule") private Schedule schedule; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java index 73af31b6c..59260918b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java @@ -9,10 +9,7 @@ @Generated public class CreateDatabaseInstanceRequest { - /** - * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and - * storage. - */ + /** Instance to create. 
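A minimal sketch of the "Instance to create" payload re-documented above, assuming the generated DatabaseInstance builder exposes name and capacity setters; the "CU_1" tier value is an assumption, not taken from this diff.

import com.databricks.sdk.service.database.CreateDatabaseInstanceRequest;
import com.databricks.sdk.service.database.DatabaseInstance;

public class CreateDatabaseInstanceSketch {
  public static void main(String[] args) {
    CreateDatabaseInstanceRequest request =
        new CreateDatabaseInstanceRequest()
            .setDatabaseInstance(
                new DatabaseInstance()
                    .setName("hypothetical-instance") // placeholder instance name
                    .setCapacity("CU_1")); // capacity tier, assumed value
    // The request would then go to the Database API's createDatabaseInstance call (accessor assumed).
    System.out.println(request);
  }
}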
*/ @JsonProperty("database_instance") private DatabaseInstance databaseInstance; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java index 28c177a98..af69b9394 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java @@ -10,7 +10,7 @@ @Generated public class CreateDatabaseInstanceRoleRequest { - /** A DatabaseInstanceRole represents a Postgres role in a database instance. */ + /** */ @JsonProperty("database_instance_role") private DatabaseInstanceRole databaseInstanceRole; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java index bfadcb9fc..934692059 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java @@ -9,7 +9,7 @@ @Generated public class CreateDatabaseTableRequest { - /** Next field marker: 13 */ + /** */ @JsonProperty("table") private DatabaseTable table; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java index 7dc427c0d..1e650387d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java @@ -9,7 +9,7 @@ @Generated public class CreateSyncedDatabaseTableRequest { - /** Next field marker: 12 */ + /** */ @JsonProperty("synced_table") private SyncedDatabaseTable syncedTable; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java index ee9b0801e..702b11a4c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java @@ -95,7 +95,7 @@ public void deleteDatabaseCatalog(DeleteDatabaseCatalogRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteDatabaseCatalogResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -108,7 +108,7 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteDatabaseInstanceResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -124,7 +124,7 @@ public void deleteDatabaseInstanceRole(DeleteDatabaseInstanceRoleRequest request Request req = new Request("DELETE", path); ApiClient.setQuery(req, 
request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteDatabaseInstanceRoleResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -137,7 +137,7 @@ public void deleteDatabaseTable(DeleteDatabaseTableRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteDatabaseTableResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -150,7 +150,7 @@ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteSyncedDatabaseTableResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java index ba727372f..4653f62dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java @@ -11,6 +11,13 @@ /** Generates a credential that can be used to access database instances */ @Generated public class GenerateDatabaseCredentialRequest { + /** + * The returned token will be scoped to the union of instance_names and instances containing the + * specified UC tables, so instance_names is allowed to be empty. + */ + @JsonProperty("claims") + private Collection claims; + /** Instances to which the token will be scoped. 
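The new claims field above lets the generated credential be scoped to Unity Catalog tables instead of (or in addition to) explicit instance names. Below is a minimal sketch using only the types added in this diff; the table name is a placeholder, and the collection element types are inferred since the generic parameters are elided in this excerpt.

import com.databricks.sdk.service.database.GenerateDatabaseCredentialRequest;
import com.databricks.sdk.service.database.RequestedClaims;
import com.databricks.sdk.service.database.RequestedClaimsPermissionSet;
import com.databricks.sdk.service.database.RequestedResource;
import java.util.Arrays;
import java.util.UUID;

public class GenerateCredentialSketch {
  public static void main(String[] args) {
    // Scope the token to a single UC table with read-only permissions;
    // instance_names may be left empty when claims are provided.
    GenerateDatabaseCredentialRequest request =
        new GenerateDatabaseCredentialRequest()
            .setRequestId(UUID.randomUUID().toString())
            .setClaims(
                Arrays.asList(
                    new RequestedClaims()
                        .setPermissionSet(RequestedClaimsPermissionSet.READ_ONLY)
                        .setResources(
                            Arrays.asList(
                                new RequestedResource()
                                    .setTableName("main.hypothetical_schema.hypothetical_table")))));
    // Typically passed to the Database API's generateDatabaseCredential call (accessor assumed).
    System.out.println(request);
  }
}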
*/ @JsonProperty("instance_names") private Collection instanceNames; @@ -19,6 +26,15 @@ public class GenerateDatabaseCredentialRequest { @JsonProperty("request_id") private String requestId; + public GenerateDatabaseCredentialRequest setClaims(Collection claims) { + this.claims = claims; + return this; + } + + public Collection getClaims() { + return claims; + } + public GenerateDatabaseCredentialRequest setInstanceNames(Collection instanceNames) { this.instanceNames = instanceNames; return this; @@ -42,18 +58,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GenerateDatabaseCredentialRequest that = (GenerateDatabaseCredentialRequest) o; - return Objects.equals(instanceNames, that.instanceNames) + return Objects.equals(claims, that.claims) + && Objects.equals(instanceNames, that.instanceNames) && Objects.equals(requestId, that.requestId); } @Override public int hashCode() { - return Objects.hash(instanceNames, requestId); + return Objects.hash(claims, instanceNames, requestId); } @Override public String toString() { return new ToStringer(GenerateDatabaseCredentialRequest.class) + .add("claims", claims) .add("instanceNames", instanceNames) .add("requestId", requestId) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaims.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaims.java new file mode 100755 index 000000000..d4c9dd181 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaims.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class RequestedClaims { + /** */ + @JsonProperty("permission_set") + private RequestedClaimsPermissionSet permissionSet; + + /** */ + @JsonProperty("resources") + private Collection resources; + + public RequestedClaims setPermissionSet(RequestedClaimsPermissionSet permissionSet) { + this.permissionSet = permissionSet; + return this; + } + + public RequestedClaimsPermissionSet getPermissionSet() { + return permissionSet; + } + + public RequestedClaims setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RequestedClaims that = (RequestedClaims) o; + return Objects.equals(permissionSet, that.permissionSet) + && Objects.equals(resources, that.resources); + } + + @Override + public int hashCode() { + return Objects.hash(permissionSet, resources); + } + + @Override + public String toString() { + return new ToStringer(RequestedClaims.class) + .add("permissionSet", permissionSet) + .add("resources", resources) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaimsPermissionSet.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaimsPermissionSet.java new file mode 100755 index 000000000..e3532d1ff --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaimsPermissionSet.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +/** Might add WRITE in the future */ +@Generated +public enum RequestedClaimsPermissionSet { + READ_ONLY, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedResource.java new file mode 100755 index 000000000..e3c4ab6d8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedResource.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RequestedResource { + /** */ + @JsonProperty("table_name") + private String tableName; + + /** */ + @JsonProperty("unspecified_resource_name") + private String unspecifiedResourceName; + + public RequestedResource setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + public RequestedResource setUnspecifiedResourceName(String unspecifiedResourceName) { + this.unspecifiedResourceName = unspecifiedResourceName; + return this; + } + + public String getUnspecifiedResourceName() { + return unspecifiedResourceName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RequestedResource that = (RequestedResource) o; + return Objects.equals(tableName, that.tableName) + && Objects.equals(unspecifiedResourceName, that.unspecifiedResourceName); + } + + @Override + public int hashCode() { + return Objects.hash(tableName, unspecifiedResourceName); + } + + @Override + public String toString() { + return new ToStringer(RequestedResource.class) + .add("tableName", tableName) + .add("unspecifiedResourceName", unspecifiedResourceName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java index c52f7a692..dd1ff0837 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java @@ -43,7 +43,7 @@ public class SyncedDatabaseTable { @JsonProperty("name") private String name; - /** Specification of a synced database table. */ + /** */ @JsonProperty("spec") private SyncedTableSpec spec; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java index e555485bd..84ecf7c87 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java @@ -10,10 +10,7 @@ /** Status of a synced table. 
*/ @Generated public class SyncedTableStatus { - /** - * Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE - * or the SYNCED_UPDATING_PIPELINE_RESOURCES state. - */ + /** */ @JsonProperty("continuous_update_status") private SyncedTableContinuousUpdateStatus continuousUpdateStatus; @@ -21,10 +18,7 @@ public class SyncedTableStatus { @JsonProperty("detailed_state") private SyncedTableState detailedState; - /** - * Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the - * SYNCED_PIPELINE_FAILED state. - */ + /** */ @JsonProperty("failed_status") private SyncedTableFailedStatus failedStatus; @@ -55,17 +49,11 @@ public class SyncedTableStatus { @JsonProperty("pipeline_id") private String pipelineId; - /** - * Detailed status of a synced table. Shown if the synced table is in the - * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. - */ + /** */ @JsonProperty("provisioning_status") private SyncedTableProvisioningStatus provisioningStatus; - /** - * Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE - * or the SYNCED_NO_PENDING_UPDATE state. - */ + /** */ @JsonProperty("triggered_update_status") private SyncedTableTriggeredUpdateStatus triggeredUpdateStatus; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java index 9ce802b5b..bf29482bc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java @@ -11,10 +11,7 @@ @Generated public class UpdateDatabaseInstanceRequest { - /** - * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and - * storage. - */ + /** */ @JsonProperty("database_instance") private DatabaseInstance databaseInstance; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java deleted file mode 100755 index 8d7475d9d..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class AddBlockResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(AddBlockResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java deleted file mode 100755 index 8126adce4..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class CloseResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(CloseResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java deleted file mode 100755 index 63bfd628e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class CreateDirectoryResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(CreateDirectoryResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java index 1887e9e8c..e36bea6cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java @@ -24,7 +24,7 @@ public void addBlock(AddBlock request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, AddBlockResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -38,7 +38,7 @@ public void close(Close request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, CloseResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -66,7 +66,7 @@ public void delete(Delete request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -106,7 +106,7 @@ public void mkdirs(MkDirs request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, MkDirsResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -120,7 +120,7 @@ public void move(Move request) { ApiClient.setQuery(req, request); req.withHeader("Accept", 
"application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, MoveResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -134,7 +134,7 @@ public void put(Put request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PutResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java deleted file mode 100755 index bd1a5f4b6..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteDirectoryResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteDirectoryResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java deleted file mode 100755 index 6d741c8d3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java index 508253f77..75a967508 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java @@ -26,7 +26,7 @@ public void createDirectory(CreateDirectoryRequest request) { try { Request req = new Request("PUT", path); ApiClient.setQuery(req, request); - apiClient.execute(req, CreateDirectoryResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -40,7 +40,7 @@ public void delete(DeleteFileRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -55,7 +55,7 @@ public void deleteDirectory(DeleteDirectoryRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteDirectoryResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -85,7 +85,7 @@ public void getDirectoryMetadata(GetDirectoryMetadataRequest request) { try { Request req = new Request("HEAD", path); ApiClient.setQuery(req, request); - apiClient.execute(req, GetDirectoryMetadataResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -130,7 +130,7 @@ public void upload(UploadRequest request) { Request req = new Request("PUT", path, request.getContents()); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/octet-stream"); - apiClient.execute(req, UploadResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java deleted file mode 100755 index 324875d3e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class GetDirectoryMetadataResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(GetDirectoryMetadataResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java deleted file mode 100755 index 4fdfa15f3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class MkDirsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(MkDirsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java deleted file mode 100755 index 47ccb461e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class MoveResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(MoveResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java deleted file mode 100755 index 8d41a1c8f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class PutResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(PutResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java deleted file mode 100755 index 68c79384d..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.files; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UploadResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UploadResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java index 2e96227dc..c2f789ddb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java @@ -13,7 +13,7 @@ public class AccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private PermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java index ec429f93b..72c47a695 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java @@ -40,7 +40,7 @@ public void delete(DeleteAccountGroupRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -86,7 +86,7 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PatchResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -102,7 +102,7 @@ public void update(Group request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - 
apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java index b576d22d2..32f4a6e4b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java @@ -41,7 +41,7 @@ public void delete(DeleteAccountServicePrincipalRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -88,7 +88,7 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PatchResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -104,7 +104,7 @@ public void update(ServicePrincipal request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java index 47fd2d266..b156d31af 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java @@ -40,7 +40,7 @@ public void delete(DeleteAccountUserRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -86,7 +86,7 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PatchResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -102,7 +102,7 @@ public void update(User request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java deleted file mode 100755 index c0a1c8f66..000000000 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.iam; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java deleted file mode 100755 index 2b53b7330..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.iam; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteWorkspacePermissionAssignmentResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteWorkspacePermissionAssignmentResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java index 282689a35..fabddee61 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteGroupRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -75,7 +75,7 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PatchResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -88,7 +88,7 @@ public void update(Group request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java deleted file mode 100755 index c5d81f464..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.iam; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class PatchResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(PatchResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java index 71509c72f..0fdb59942 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java @@ -18,7 +18,7 @@ public class Permission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private PermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java index 87ef5a99a..f394a3043 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java @@ -13,7 +13,7 @@ public class PermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private PermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java index 0c6daef30..f43216c63 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteServicePrincipalRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -75,7 +75,7 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PatchResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -88,7 +88,7 @@ public void update(ServicePrincipal request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - 
apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java deleted file mode 100755 index 251e169d9..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.iam; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java index eb980dd36..ba2f1a67a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteUserRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -99,7 +99,7 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PatchResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -126,7 +126,7 @@ public void update(User request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java index d163fe44c..c53cbf64f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java @@ -26,7 +26,7 @@ public void delete(DeleteWorkspaceAssignmentRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteWorkspacePermissionAssignmentResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java index e7ad71205..147838c6d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java @@ -166,14 +166,7 @@ public class BaseRun { @JsonProperty("run_page_url") private String runPageUrl; - /** - * The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * - * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit - * run. A run created with :method:jobs/submit. - * - *
[dbutils.notebook.run]: - * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow - */ + /** */ @JsonProperty("run_type") private RunType runType; @@ -203,7 +196,7 @@ public class BaseRun { @JsonProperty("state") private RunState state; - /** The current status of the run */ + /** */ @JsonProperty("status") private RunStatus status; @@ -216,23 +209,11 @@ public class BaseRun { @JsonProperty("tasks") private Collection tasks; - /** - * The type of trigger that fired this run. - * - *
* `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * - * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on - * demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a - * previously failed run. This occurs when you request to re-run the job in case of failures. * - * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: - * Indicates a run that is triggered by a file arrival. * `CONTINUOUS`: Indicates a run that is - * triggered by a continuous job. * `TABLE`: Indicates a run that is triggered by a table update. - * * `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job - * run. * `MODEL`: Indicates a run that is triggered by a model update. - */ + /** */ @JsonProperty("trigger") private TriggerType trigger; - /** Additional details about what triggered the run */ + /** */ @JsonProperty("trigger_info") private TriggerInfo triggerInfo; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java deleted file mode 100755 index c0b570c3f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class CancelAllRunsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(CancelAllRunsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java deleted file mode 100755 index 8c956bd00..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class CancelRunResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(CancelRunResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index 527597bec..ae5c94e8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -87,7 +87,7 @@ public class CreateJob { @JsonProperty("git_source") private GitSource gitSource; - /** An optional set of health rules that can be defined for this job. */ + /** */ @JsonProperty("health") private JobsHealthRules health; @@ -142,13 +142,7 @@ public class CreateJob { @JsonProperty("queue") private QueueSettings queue; - /** - * Write-only setting. Specifies the user or service principal that the job runs as. If not - * specified, the job runs as the user who created the job. - * - *
Either `user_name` or `service_principal_name` should be specified. If not, an error is - * thrown. - */ + /** */ @JsonProperty("run_as") private JobRunAs runAs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java deleted file mode 100755 index 2fb1336ff..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java deleted file mode 100755 index 125c19ad2..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteRunResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteRunResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java index e14ae5f66..88c0dae70 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java @@ -40,10 +40,7 @@ public class GitSource { @JsonProperty("git_provider") private GitProvider gitProvider; - /** - * Read-only state of the remote repository at the time the job was run. This field is only - * included on job runs. 
- */ + /** */ @JsonProperty("git_snapshot") private GitSnapshot gitSnapshot; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java index d2e6553d1..4333dec1e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java @@ -13,7 +13,7 @@ public class JobAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private JobPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java index dc3dcccea..c54674370 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java @@ -13,11 +13,7 @@ public class JobEnvironment { @JsonProperty("environment_key") private String environmentKey; - /** - * The environment entity used to preserve serverless environment side panel, jobs' environment - * for non-notebook task, and DLT's environment for classic and serverless pipelines. In this - * minimal environment spec, only pip dependencies are supported. - */ + /** */ @JsonProperty("spec") private com.databricks.sdk.service.compute.Environment spec; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java index 4efc5cb2a..faa69a980 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java @@ -18,7 +18,7 @@ public class JobPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private JobPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescription.java index cbd9caa08..489b9aa33 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsDescription.java @@ -13,7 +13,7 @@ public class JobPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private JobPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index a79bee35f..ccee39d63 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -83,7 +83,7 @@ public class JobSettings { @JsonProperty("git_source") private GitSource gitSource; - /** An optional set of health rules that can be defined for this job. 
*/ + /** */ @JsonProperty("health") private JobsHealthRules health; @@ -138,13 +138,7 @@ public class JobSettings { @JsonProperty("queue") private QueueSettings queue; - /** - * Write-only setting. Specifies the user or service principal that the job runs as. If not - * specified, the job runs as the user who created the job. - * - *
Either `user_name` or `service_principal_name` should be specified. If not, an error is - * thrown. - */ + /** */ @JsonProperty("run_as") private JobRunAs runAs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java index c76e051bb..65f55a858 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java @@ -9,23 +9,11 @@ @Generated public class JobsHealthRule { - /** - * Specifies the health metric that is being evaluated for a particular health rule. - * - *
* `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * - * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed - * across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An - * estimate of the maximum offset lag across all streams. This metric is in Public Preview. * - * `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This - * metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of - * outstanding files across all streams. This metric is in Public Preview. - */ + /** */ @JsonProperty("metric") private JobsHealthMetric metric; - /** - * Specifies the operator used to compare the health metric value with the specified threshold. - */ + /** */ @JsonProperty("op") private JobsHealthOperator op; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java index 0902a4b5f..f40489960 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java @@ -23,7 +23,7 @@ public void cancelAllRuns(CancelAllRuns request) { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, CancelAllRunsResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -36,7 +36,7 @@ public void cancelRun(CancelRun request) { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, CancelRunResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -63,7 +63,7 @@ public void delete(DeleteJob request) { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -76,7 +76,7 @@ public void deleteRun(DeleteRun request) { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteRunResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -208,7 +208,7 @@ public void reset(ResetJob request) { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ResetResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -263,7 +263,7 @@ public void update(UpdateJob request) { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } 
catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java index f89c9be21..0729c1863 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java @@ -9,13 +9,7 @@ @Generated public class QueueDetails { - /** - * The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to - * reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was - * queued due to reaching the per-job limit of concurrent job runs. * - * `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of - * active run job tasks. - */ + /** */ @JsonProperty("code") private QueueDetailsCodeCode code; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java index 24a8b911d..7a0e6d77f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java @@ -40,7 +40,7 @@ public class RepairHistoryItem { @JsonProperty("state") private RunState state; - /** The current status of the run */ + /** */ @JsonProperty("status") private RunStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetResponse.java deleted file mode 100755 index 34127047c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class ResetResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(ResetResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index 1e113851a..70bb1e68c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -175,14 +175,7 @@ public class Run { @JsonProperty("run_page_url") private String runPageUrl; - /** - * The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * - * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit - * run. A run created with :method:jobs/submit. - * - *
[dbutils.notebook.run]: - * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow - */ + /** */ @JsonProperty("run_type") private RunType runType; @@ -212,7 +205,7 @@ public class Run { @JsonProperty("state") private RunState state; - /** The current status of the run */ + /** */ @JsonProperty("status") private RunStatus status; @@ -225,23 +218,11 @@ public class Run { @JsonProperty("tasks") private Collection tasks; - /** - * The type of trigger that fired this run. - * - *
* `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * - * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on - * demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a - * previously failed run. This occurs when you request to re-run the job in case of failures. * - * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: - * Indicates a run that is triggered by a file arrival. * `CONTINUOUS`: Indicates a run that is - * triggered by a continuous job. * `TABLE`: Indicates a run that is triggered by a table update. - * * `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job - * run. * `MODEL`: Indicates a run that is triggered by a model update. - */ + /** */ @JsonProperty("trigger") private TriggerType trigger; - /** Additional details about what triggered the run */ + /** */ @JsonProperty("trigger_info") private TriggerInfo triggerInfo; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java index e9219f045..9b882ec8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java @@ -14,7 +14,7 @@ public class RunStatus { @JsonProperty("queue_details") private QueueDetails queueDetails; - /** The current state of the run. */ + /** */ @JsonProperty("state") private RunLifecycleStateV2State state; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index 56ffe907b..268a3c4fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -296,7 +296,7 @@ public class RunTask { @JsonProperty("state") private RunState state; - /** The current status of the run */ + /** */ @JsonProperty("status") private RunStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java index 1810d913e..a59931c1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java @@ -10,12 +10,7 @@ @Generated public class SqlAlertOutput { - /** - * The state of the SQL alert. - * - *
* UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger - * conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions - */ + /** */ @JsonProperty("alert_state") private SqlAlertState alertState; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java index 694a4df67..58c71b012 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java @@ -46,7 +46,7 @@ public class SubmitRun { @JsonProperty("git_source") private GitSource gitSource; - /** An optional set of health rules that can be defined for this job. */ + /** */ @JsonProperty("health") private JobsHealthRules health; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index e17c2d833..07677312a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -90,7 +90,7 @@ public class SubmitTask { @JsonProperty("gen_ai_compute_task") private GenAiComputeTask genAiComputeTask; - /** An optional set of health rules that can be defined for this job. */ + /** */ @JsonProperty("health") private JobsHealthRules health; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 9de9e54e8..21d0c3daf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -95,7 +95,7 @@ public class Task { @JsonProperty("gen_ai_compute_task") private GenAiComputeTask genAiComputeTask; - /** An optional set of health rules that can be defined for this job. */ + /** */ @JsonProperty("health") private JobsHealthRules health; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java index 73025923e..044b0866f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java @@ -9,46 +9,7 @@ @Generated public class TerminationDetails { - /** - * The code indicates why the run was terminated. Additional codes might be introduced in future - * releases. * `SUCCESS`: The run was completed successfully. * `SUCCESS_WITH_FAILURES`: The run - * was completed successfully but some child runs failed. * `USER_CANCELED`: The run was - * successfully canceled during execution by a user. * `CANCELED`: The run was canceled during - * execution by the Databricks platform; for example, if the maximum run duration was exceeded. * - * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency - * type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The - * run encountered an unexpected error. Refer to the state message for further details. * - * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. 
* - * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further - * details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when - * communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because - * it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The - * workspace has reached the quota for the maximum number of concurrent active runs. Consider - * scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it - * tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: - * The number of cluster creation, start, and upsize requests have exceeded the allotted rate - * limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: - * The run failed due to an error when accessing the customer blob storage. Refer to the state - * message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. - * For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run - * failed due to a permission issue while accessing a resource. Refer to the state message for - * further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the - * user-requested library. Refer to the state message for further details. The causes might - * include, but are not limited to: The provided library is invalid, there are insufficient - * permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The - * scheduled run exceeds the limit of maximum concurrent runs set for the job. * - * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the - * maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A - * resource necessary for run execution does not exist. Refer to the state message for further - * details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer - * to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud - * provider issue. Refer to the state message for further details. * - * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size - * limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user. - * - *
[Link]: - * https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now - */ + /** */ @JsonProperty("code") private TerminationCodeCode code; @@ -59,15 +20,7 @@ public class TerminationDetails { @JsonProperty("message") private String message; - /** - * * `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the - * Databricks platform. Please look at the [status page] or contact support if the issue persists. - * * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job - * configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud - * provider. - * - *
[status page]: https://status.databricks.com/ - */ + /** */ @JsonProperty("type") private TerminationTypeType typeValue; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java deleted file mode 100755 index d53424f25..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java index c817eb350..919752faf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java @@ -42,7 +42,7 @@ public void delete(DeleteInstallationRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteInstallationResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java deleted file mode 100755 index b7cca36ab..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.marketplace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteExchangeFilterResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteExchangeFilterResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java deleted file mode 100755 index be2eb59ea..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.marketplace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteExchangeResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteExchangeResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java deleted file mode 100755 index f799f3705..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.marketplace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteFileResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteFileResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java deleted file mode 100755 index 0928712ff..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.marketplace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteInstallationResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteInstallationResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java deleted file mode 100755 index 1250054c3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.marketplace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteListingResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteListingResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java deleted file mode 100755 index 41f94447f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.marketplace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteProviderResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteProviderResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java index 60e960e72..93cacffed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java @@ -69,7 +69,7 @@ public class ListingSummary { @JsonProperty("share") private ShareInfo share; - /** Enums */ + /** */ @JsonProperty("status") private ListingStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java index 8765d123b..11e2e383e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java @@ -17,7 +17,7 @@ public class PersonalizationRequest { @JsonProperty("consumer_region") private RegionInfo consumerRegion; - /** contact info for the consumer requesting data or performing a listing installation */ + /** */ @JsonProperty("contact_info") private ContactInfo contactInfo; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java index 6baa7bce6..be01971e8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteExchangeFilterRequest request) { 
Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteExchangeFilterResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java index 5ac2520b3..c40746d43 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java @@ -51,7 +51,7 @@ public void delete(DeleteExchangeRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteExchangeResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -65,7 +65,7 @@ public void deleteListingFromExchange(RemoveExchangeForListingRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, RemoveExchangeForListingResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java index e8fe25bac..4fc8f0927 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteFileRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteFileResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java index ed54330ad..3cddc1b63 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteListingRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteListingResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java index 618decdf9..4189392dd 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteProviderRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteProviderResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java deleted file mode 100755 index 02a6e7364..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.marketplace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class RemoveExchangeForListingResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(RemoveExchangeForListingResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java index 5a370dd43..80ecebe7e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java @@ -7,28 +7,17 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Activity recorded for the action. */ +/** + * For activities, this contains the activity recorded for the action. For comments, this contains + * the comment details. For transition requests, this contains the transition request details. + */ @Generated public class Activity { - /** - * Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding - * stage transition. - * - *
* `REQUESTED_TRANSITION`: User requested the corresponding stage transition. - * - *
* `CANCELLED_REQUEST`: User cancelled an existing transition request. - * - *
* `APPROVED_REQUEST`: User approved the corresponding stage transition. - * - *
* `REJECTED_REQUEST`: User rejected the coressponding stage transition. - * - *
* `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing - * model versions in a stage. - */ + /** */ @JsonProperty("activity_type") private ActivityType activityType; - /** User-provided comment associated with the activity. */ + /** User-provided comment associated with the activity, comment, or transition request. */ @JsonProperty("comment") private String comment; @@ -48,7 +37,7 @@ public class Activity { *
* `Archived`: Archived stage. */ @JsonProperty("from_stage") - private Stage fromStage; + private String fromStage; /** Unique identifier for the object. */ @JsonProperty("id") @@ -78,7 +67,7 @@ public class Activity { *
* `Archived`: Archived stage. */ @JsonProperty("to_stage") - private Stage toStage; + private String toStage; /** The username of the user that created the object. */ @JsonProperty("user_id") @@ -111,12 +100,12 @@ public Long getCreationTimestamp() { return creationTimestamp; } - public Activity setFromStage(Stage fromStage) { + public Activity setFromStage(String fromStage) { this.fromStage = fromStage; return this; } - public Stage getFromStage() { + public String getFromStage() { return fromStage; } @@ -147,12 +136,12 @@ public String getSystemComment() { return systemComment; } - public Activity setToStage(Stage toStage) { + public Activity setToStage(String toStage) { this.toStage = toStage; return this; } - public Stage getToStage() { + public String getToStage() { return toStage; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java index 727e1b2c6..8d4436b4b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java @@ -5,16 +5,23 @@ import com.databricks.sdk.support.Generated; /** - * An action that a user (with sufficient permissions) could take on an activity. Valid values are: - * * `APPROVE_TRANSITION_REQUEST`: Approve a transition request + * An action that a user (with sufficient permissions) could take on an activity or comment. + * + *
For activities, valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request * *
* `REJECT_TRANSITION_REQUEST`: Reject a transition request * *
* `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request + * + *
For comments, valid values are: * `EDIT_COMMENT`: Edit the comment + * + *
* `DELETE_COMMENT`: Delete the comment */ @Generated public enum ActivityAction { APPROVE_TRANSITION_REQUEST, // Approve a transition request CANCEL_TRANSITION_REQUEST, // Cancel (delete) a transition request + DELETE_COMMENT, // Delete the comment + EDIT_COMMENT, // Edit the comment REJECT_TRANSITION_REQUEST, // Reject a transition request } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java index 2f1b3780c..f8384a434 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Details required to identify and approve a model version stage transition request. */ @Generated public class ApproveTransitionRequest { /** Specifies whether to archive all current model versions in the target stage. */ @@ -33,7 +34,7 @@ public class ApproveTransitionRequest { *
* `Archived`: Archived stage. */ @JsonProperty("stage") - private Stage stage; + private String stage; /** Version of the model. */ @JsonProperty("version") @@ -66,12 +67,12 @@ public String getName() { return name; } - public ApproveTransitionRequest setStage(Stage stage) { + public ApproveTransitionRequest setStage(String stage) { this.stage = stage; return this; } - public Stage getStage() { + public String getStage() { return stage; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java index 3bf4ef6b8..cd6e89a72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java @@ -9,7 +9,7 @@ @Generated public class ApproveTransitionRequestResponse { - /** Activity recorded for the action. */ + /** New activity generated as a result of this operation. */ @JsonProperty("activity") private Activity activity; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java index 519f16283..2e05ec6da 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java @@ -5,13 +5,23 @@ import com.databricks.sdk.support.Generated; /** - * An action that a user (with sufficient permissions) could take on a comment. Valid values are: * - * `EDIT_COMMENT`: Edit the comment + * An action that a user (with sufficient permissions) could take on an activity or comment. + * + *
For activities, valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request + * + *
* `REJECT_TRANSITION_REQUEST`: Reject a transition request + * + *
* `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request + * + *
For comments, valid values are: * `EDIT_COMMENT`: Edit the comment * *
* `DELETE_COMMENT`: Delete the comment */ @Generated public enum CommentActivityAction { + APPROVE_TRANSITION_REQUEST, // Approve a transition request + CANCEL_TRANSITION_REQUEST, // Cancel (delete) a transition request DELETE_COMMENT, // Delete the comment EDIT_COMMENT, // Edit the comment + REJECT_TRANSITION_REQUEST, // Reject a transition request } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java index bc0cac89e..e203c6f82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java @@ -8,14 +8,17 @@ import java.util.Collection; import java.util.Objects; -/** Comment details. */ +/** + * For activities, this contains the activity recorded for the action. For comments, this contains + * the comment details. For transition requests, this contains the transition request details. + */ @Generated public class CommentObject { /** Array of actions on the activity allowed for the current viewer. */ @JsonProperty("available_actions") private Collection availableActions; - /** User-provided comment on the action. */ + /** User-provided comment associated with the activity, comment, or transition request. */ @JsonProperty("comment") private String comment; @@ -23,7 +26,7 @@ public class CommentObject { @JsonProperty("creation_timestamp") private Long creationTimestamp; - /** Comment ID */ + /** Unique identifier for the object. */ @JsonProperty("id") private String id; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java index fd7f66eb3..b682d161d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Details required to create a comment on a model version. */ @Generated public class CreateComment { /** User-provided comment on the action. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java index 6387f9817..7fb53fad2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateCommentResponse.java @@ -9,7 +9,7 @@ @Generated public class CreateCommentResponse { - /** Comment details. */ + /** New comment object */ @JsonProperty("comment") private CommentObject comment; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateFeatureTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateFeatureTagRequest.java index 564e1b285..84884f207 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateFeatureTagRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateFeatureTagRequest.java @@ -13,7 +13,7 @@ public class CreateFeatureTagRequest { /** */ @JsonIgnore private String featureName; - /** Represents a tag on a feature in a feature table. 
*/ + /** */ @JsonProperty("feature_tag") private FeatureTag featureTag; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateOnlineStoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateOnlineStoreRequest.java index fac5e8b51..44ddea63d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateOnlineStoreRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateOnlineStoreRequest.java @@ -9,7 +9,7 @@ @Generated public class CreateOnlineStoreRequest { - /** An OnlineStore is a logical database instance that stores and serves features online. */ + /** Online store to create. */ @JsonProperty("online_store") private OnlineStore onlineStore; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java index 2f7c37615..3ccf8b812 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** Details required to create a registry webhook. */ @Generated public class CreateRegistryWebhook { /** User-specified description for the webhook. */ @@ -48,11 +49,11 @@ public class CreateRegistryWebhook { @JsonProperty("events") private Collection events; - /** */ + /** External HTTPS URL called on event trigger (by using a POST request). */ @JsonProperty("http_url_spec") private HttpUrlSpec httpUrlSpec; - /** */ + /** ID of the job that the webhook runs. */ @JsonProperty("job_spec") private JobSpec jobSpec; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java index 77f767114..a45efb9a1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Details required to create a model version stage transition request. */ @Generated public class CreateTransitionRequest { /** User-provided comment on the action. */ @@ -29,7 +30,7 @@ public class CreateTransitionRequest { *

* `Archived`: Archived stage. */ @JsonProperty("stage") - private Stage stage; + private String stage; /** Version of the model. */ @JsonProperty("version") @@ -53,12 +54,12 @@ public String getName() { return name; } - public CreateTransitionRequest setStage(Stage stage) { + public CreateTransitionRequest setStage(String stage) { this.stage = stage; return this; } - public Stage getStage() { + public String getStage() { return stage; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java index 78d011768..64fd988b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java @@ -9,7 +9,7 @@ @Generated public class CreateTransitionRequestResponse { - /** Transition request details. */ + /** New activity generated for stage transition request. */ @JsonProperty("request") private TransitionRequest request; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java deleted file mode 100755 index eae7ebb02..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteCommentResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteCommentResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java deleted file mode 100755 index 2ef42a8bd..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
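// Illustrative usage sketch for the stage-type change above: CreateTransitionRequest#setStage
// (and the related transition-request calls) now accept the stage as a plain String rather than
// the Stage enum. This assumes a configured WorkspaceClient; the model name and version are
// hypothetical placeholders, not values from this change.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.CreateTransitionRequest;
import com.databricks.sdk.service.ml.CreateTransitionRequestResponse;

public class TransitionRequestSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // resolves credentials from the environment

    CreateTransitionRequestResponse response =
        w.modelRegistry()
            .createTransitionRequest(
                new CreateTransitionRequest()
                    .setName("my-model")   // hypothetical registered model
                    .setVersion("1")
                    .setStage("Staging")); // previously Stage.STAGING

    // The response wraps the transition request generated for the action.
    System.out.println(response.getRequest());
  }
}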
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteExperimentResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteExperimentResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java deleted file mode 100755 index 4032513b7..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteLoggedModelResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteLoggedModelResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java deleted file mode 100755 index 5e9f53856..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteLoggedModelTagResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteLoggedModelTagResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java deleted file mode 100755 index 1b053c73f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteModelResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteModelResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java deleted file mode 100755 index 983354a74..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteModelTagResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteModelTagResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java deleted file mode 100755 index 1988edda3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteModelVersionResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteModelVersionResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java deleted file mode 100755 index d359f15af..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteModelVersionTagResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteModelVersionTagResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java deleted file mode 100755 index 8dfe844e7..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteRunResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteRunResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java deleted file mode 100755 index c40c832de..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteTagResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteTagResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java index a1977c2af..4170e18a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java @@ -41,7 +41,7 @@ public class DeleteTransitionRequestRequest { */ @JsonIgnore @QueryParam("stage") - private DeleteTransitionRequestStage stage; + private String stage; /** Version of the model. 
*/ @JsonIgnore @@ -75,12 +75,12 @@ public String getName() { return name; } - public DeleteTransitionRequestRequest setStage(DeleteTransitionRequestStage stage) { + public DeleteTransitionRequestRequest setStage(String stage) { this.stage = stage; return this; } - public DeleteTransitionRequestStage getStage() { + public String getStage() { return stage; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java index 78bab3b6c..234420b64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java @@ -4,25 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; @Generated public class DeleteTransitionRequestResponse { + /** New activity generated as a result of this operation. */ + @JsonProperty("activity") + private Activity activity; + + public DeleteTransitionRequestResponse setActivity(Activity activity) { + this.activity = activity; + return this; + } + + public Activity getActivity() { + return activity; + } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - return true; + DeleteTransitionRequestResponse that = (DeleteTransitionRequestResponse) o; + return Objects.equals(activity, that.activity); } @Override public int hashCode() { - return Objects.hash(); + return Objects.hash(activity); } @Override public String toString() { - return new ToStringer(DeleteTransitionRequestResponse.class).toString(); + return new ToStringer(DeleteTransitionRequestResponse.class) + .add("activity", activity) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java deleted file mode 100755 index d7325c6b4..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.fasterxml.jackson.annotation.JsonProperty; - -@Generated -public enum DeleteTransitionRequestStage { - @JsonProperty("Archived") - ARCHIVED, - - @JsonProperty("None") - NONE, - - @JsonProperty("Production") - PRODUCTION, - - @JsonProperty("Staging") - STAGING, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java deleted file mode 100755 index 04056259e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteWebhookResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteWebhookResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java index 250fa534d..3001c8954 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java @@ -13,7 +13,7 @@ public class ExperimentAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ExperimentPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java index 8988beb05..3eb9dabba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java @@ -18,7 +18,7 @@ public class ExperimentPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ExperimentPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescription.java index 0bb142d35..4d832f824 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermissionsDescription.java @@ -13,7 +13,7 @@ public class ExperimentPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ExperimentPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index 1b53bb69f..0d39a660b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -66,7 +66,7 @@ public void deleteExperiment(DeleteExperiment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteExperimentResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -79,7 +79,7 @@ public void deleteLoggedModel(DeleteLoggedModelRequest request) { Request req = new 
Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteLoggedModelResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -94,7 +94,7 @@ public void deleteLoggedModelTag(DeleteLoggedModelTagRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteLoggedModelTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -108,7 +108,7 @@ public void deleteRun(DeleteRun request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteRunResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -136,7 +136,7 @@ public void deleteTag(DeleteTag request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -284,7 +284,7 @@ public void logBatch(LogBatch request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, LogBatchResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -298,7 +298,7 @@ public void logInputs(LogInputs request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, LogInputsResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -312,7 +312,7 @@ public void logLoggedModelParams(LogLoggedModelParamsRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, LogLoggedModelParamsRequestResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -326,7 +326,7 @@ public void logMetric(LogMetric request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, LogMetricResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -340,7 +340,7 @@ public void logModel(LogModel request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, LogModelResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -354,7 +354,7 @@ public void logOutputs(LogOutputsRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); 
req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, LogOutputsResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -368,7 +368,7 @@ public void logParam(LogParam request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, LogParamResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -382,7 +382,7 @@ public void restoreExperiment(RestoreExperiment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, RestoreExperimentResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -396,7 +396,7 @@ public void restoreRun(RestoreRun request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, RestoreRunResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -466,7 +466,7 @@ public void setExperimentTag(SetExperimentTag request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, SetExperimentTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -480,7 +480,7 @@ public void setLoggedModelTags(SetLoggedModelTagsRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, SetLoggedModelTagsResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -508,7 +508,7 @@ public void setTag(SetTag request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, SetTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -522,7 +522,7 @@ public void updateExperiment(UpdateExperiment request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateExperimentResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java new file mode 100755 index 000000000..e1fc06628 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Feature for model version. */ +@Generated +public class Feature { + /** Feature name */ + @JsonProperty("feature_name") + private String featureName; + + /** Feature table id */ + @JsonProperty("feature_table_id") + private String featureTableId; + + /** Feature table name */ + @JsonProperty("feature_table_name") + private String featureTableName; + + public Feature setFeatureName(String featureName) { + this.featureName = featureName; + return this; + } + + public String getFeatureName() { + return featureName; + } + + public Feature setFeatureTableId(String featureTableId) { + this.featureTableId = featureTableId; + return this; + } + + public String getFeatureTableId() { + return featureTableId; + } + + public Feature setFeatureTableName(String featureTableName) { + this.featureTableName = featureTableName; + return this; + } + + public String getFeatureTableName() { + return featureTableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Feature that = (Feature) o; + return Objects.equals(featureName, that.featureName) + && Objects.equals(featureTableId, that.featureTableId) + && Objects.equals(featureTableName, that.featureTableName); + } + + @Override + public int hashCode() { + return Objects.hash(featureName, featureTableId, featureTableName); + } + + @Override + public String toString() { + return new ToStringer(Feature.class) + .add("featureName", featureName) + .add("featureTableId", featureTableId) + .add("featureTableName", featureTableName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureList.java new file mode 100755 index 000000000..0c286cfa1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureList.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
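// Illustrative sketch of the new Feature / FeatureList value classes introduced here
// (FeatureList is completed in the next hunk). All values are hypothetical placeholders;
// in practice these objects describe the features backing a model version.
import com.databricks.sdk.service.ml.Feature;
import com.databricks.sdk.service.ml.FeatureList;
import java.util.Arrays;

public class FeatureListSketch {
  public static void main(String[] args) {
    Feature feature =
        new Feature()
            .setFeatureName("avg_purchase_amount")
            .setFeatureTableName("main.default.customer_features")
            .setFeatureTableId("0123-456789-abcdef"); // hypothetical table id

    // FeatureList simply wraps the collection of features attached to a model version.
    FeatureList features = new FeatureList().setFeatures(Arrays.asList(feature));
    System.out.println(features);
  }
}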
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Feature list wrap all the features for a model version */ +@Generated +public class FeatureList { + /** */ + @JsonProperty("features") + private Collection features; + + public FeatureList setFeatures(Collection features) { + this.features = features; + return this; + } + + public Collection getFeatures() { + return features; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FeatureList that = (FeatureList) o; + return Objects.equals(features, that.features); + } + + @Override + public int hashCode() { + return Objects.hash(features); + } + + @Override + public String toString() { + return new ToStringer(FeatureList.class).add("features", features).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java index 97a48a1db..06e220705 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java @@ -37,7 +37,7 @@ public void deleteOnlineStore(DeleteOnlineStoreRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteOnlineStoreResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java index 281d2982a..0ee47c700 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java @@ -14,9 +14,8 @@ public class JobSpecWithoutSecret { private String jobId; /** - * URL of the workspace containing the job that this webhook runs. Defaults to the workspace URL - * in which the webhook is created. If not specified, the job’s workspace is assumed to be the - * same as the webhook’s. + * URL of the workspace containing the job that this webhook runs. If not specified, the job’s + * workspace URL is assumed to be the same as the workspace where the webhook is created. */ @JsonProperty("workspace_url") private String workspaceUrl; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java index 0f5e5bf08..8e1e27d36 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsRequest.java @@ -10,7 +10,7 @@ @Generated public class ListTransitionRequestsRequest { - /** Name of the model. */ + /** Name of the registered model. 
*/ @JsonIgnore @QueryParam("name") private String name; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java index 4afa99dbf..6e7a9469c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java @@ -12,7 +12,37 @@ @Generated public class ListWebhooksRequest { /** - * If `events` is specified, any webhook with one or more of the specified trigger events is + * Events that trigger the webhook. * `MODEL_VERSION_CREATED`: A new model version was created for + * the associated model. + * + *

* `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. + * + *

* `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. + * + *

* `COMMENT_CREATED`: A user wrote a comment on a registered model. + * + *

* `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only + * be specified for a registry-wide webhook, which can be created by not specifying a model name + * in the create request. + * + *

* `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. + * + *

* `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. + * + *

* `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to + * production. + * + *

* `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. + * + *

* `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned + * to staging. + * + *

* `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be + * transitioned to production. + * + *

* `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. + * + *

If `events` is specified, any webhook with one or more of the specified trigger events is * included in the output. If `events` is not specified, webhooks of all event types are included * in the output. */ @@ -20,9 +50,14 @@ public class ListWebhooksRequest { @QueryParam("events") private Collection events; + /** */ + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + /** - * If not specified, all webhooks associated with the specified events are listed, regardless of - * their associated model. + * Registered model name If not specified, all webhooks associated with the specified events are + * listed, regardless of their associated model. */ @JsonIgnore @QueryParam("model_name") @@ -42,6 +77,15 @@ public Collection getEvents() { return events; } + public ListWebhooksRequest setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + public ListWebhooksRequest setModelName(String modelName) { this.modelName = modelName; return this; @@ -66,19 +110,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListWebhooksRequest that = (ListWebhooksRequest) o; return Objects.equals(events, that.events) + && Objects.equals(maxResults, that.maxResults) && Objects.equals(modelName, that.modelName) && Objects.equals(pageToken, that.pageToken); } @Override public int hashCode() { - return Objects.hash(events, modelName, pageToken); + return Objects.hash(events, maxResults, modelName, pageToken); } @Override public String toString() { return new ToStringer(ListWebhooksRequest.class) .add("events", events) + .add("maxResults", maxResults) .add("modelName", modelName) .add("pageToken", pageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java deleted file mode 100755 index 3281aba0d..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class LogBatchResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(LogBatchResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java deleted file mode 100755 index b09b9a878..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
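// Illustrative sketch for the ListWebhooksRequest changes above (the documented trigger events
// and the new max_results field). Assumes a configured WorkspaceClient; the model name is a
// hypothetical placeholder, and the event constants come from the SDK's existing
// RegistryWebhookEvent enum.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.ListWebhooksRequest;
import com.databricks.sdk.service.ml.RegistryWebhook;
import com.databricks.sdk.service.ml.RegistryWebhookEvent;
import java.util.Arrays;

public class ListWebhooksSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    ListWebhooksRequest request =
        new ListWebhooksRequest()
            .setEvents(
                Arrays.asList(
                    RegistryWebhookEvent.MODEL_VERSION_CREATED,
                    RegistryWebhookEvent.TRANSITION_REQUEST_CREATED))
            .setModelName("my-model") // omit to list registry-wide webhooks
            .setMaxResults(20L);      // new paging knob added in this change

    // listWebhooks pages through the results transparently.
    for (RegistryWebhook webhook : w.modelRegistry().listWebhooks(request)) {
      System.out.println(webhook.getId());
    }
  }
}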
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class LogInputsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(LogInputsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java deleted file mode 100755 index 770602c54..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class LogLoggedModelParamsRequestResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(LogLoggedModelParamsRequestResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java deleted file mode 100755 index 18ac44107..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class LogMetricResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(LogMetricResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java deleted file mode 100755 index 937328f29..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class LogModelResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(LogModelResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java deleted file mode 100755 index 8e3c962b3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class LogOutputsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(LogOutputsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java deleted file mode 100755 index 5a11a026b..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class LogParamResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(LogParamResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesImpl.java index 44b9126de..f3a723972 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesImpl.java @@ -43,7 +43,7 @@ public void deleteFeatureTag(DeleteFeatureTagRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteFeatureTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java index f4b82307a..05fabefac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelDatabricks.java @@ -22,7 +22,7 @@ public class ModelDatabricks { @JsonProperty("id") private String id; - /** Time of the object at last update, as a Unix timestamp in milliseconds. */ + /** Last update time of the object, as a Unix timestamp in milliseconds. */ @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; @@ -34,10 +34,7 @@ public class ModelDatabricks { @JsonProperty("name") private String name; - /** - * Permission level of the requesting user on the object. For what is allowed at each level, see - * [MLflow Model permissions](..). - */ + /** Permission level granted for the requesting user on this registered model */ @JsonProperty("permission_level") private PermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java index 2c970330f..a9fd2e25c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java @@ -34,7 +34,7 @@ public ModelRegistryAPI(ModelRegistryService mock) { } public ApproveTransitionRequestResponse approveTransitionRequest( - String name, String version, Stage stage, boolean archiveExistingVersions) { + String name, String version, String stage, boolean archiveExistingVersions) { return approveTransitionRequest( new ApproveTransitionRequest() .setName(name) @@ -67,9 +67,8 @@ public CreateModelResponse createModel(String name) { } /** - * Creates a new registered model with the name specified in the request body. - * - *

Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. + * Creates a new registered model with the name specified in the request body. Throws + * `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. */ public CreateModelResponse createModel(CreateModelRequest request) { return impl.createModel(request); @@ -85,7 +84,7 @@ public CreateModelVersionResponse createModelVersion(CreateModelVersionRequest r } public CreateTransitionRequestResponse createTransitionRequest( - String name, String version, Stage stage) { + String name, String version, String stage) { return createTransitionRequest( new CreateTransitionRequest().setName(name).setVersion(version).setStage(stage)); } @@ -99,11 +98,7 @@ public CreateWebhookResponse createWebhook(Collection even return createWebhook(new CreateRegistryWebhook().setEvents(events)); } - /** - * **NOTE**: This endpoint is in Public Preview. - * - *

Creates a registry webhook. - */ + /** **NOTE:** This endpoint is in Public Preview. Creates a registry webhook. */ public CreateWebhookResponse createWebhook(CreateRegistryWebhook request) { return impl.createWebhook(request); } @@ -154,9 +149,9 @@ public void deleteModelVersionTag(DeleteModelVersionTagRequest request) { impl.deleteModelVersionTag(request); } - public void deleteTransitionRequest( - String name, String version, DeleteTransitionRequestStage stage, String creator) { - deleteTransitionRequest( + public DeleteTransitionRequestResponse deleteTransitionRequest( + String name, String version, String stage, String creator) { + return deleteTransitionRequest( new DeleteTransitionRequestRequest() .setName(name) .setVersion(version) @@ -165,15 +160,16 @@ public void deleteTransitionRequest( } /** Cancels a model version stage transition request. */ - public void deleteTransitionRequest(DeleteTransitionRequestRequest request) { - impl.deleteTransitionRequest(request); + public DeleteTransitionRequestResponse deleteTransitionRequest( + DeleteTransitionRequestRequest request) { + return impl.deleteTransitionRequest(request); } - /** - * **NOTE:** This endpoint is in Public Preview. - * - *

Deletes a registry webhook. - */ + public void deleteWebhook(String id) { + deleteWebhook(new DeleteWebhookRequest().setId(id)); + } + + /** **NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. */ public void deleteWebhook(DeleteWebhookRequest request) { impl.deleteWebhook(request); } @@ -280,11 +276,7 @@ public Iterable listTransitionRequests(ListTransitionRequestsRequest r response -> null); } - /** - * **NOTE:** This endpoint is in Public Preview. - * - *

Lists all registry webhooks. - */ + /** **NOTE:** This endpoint is in Public Preview. Lists all registry webhooks. */ public Iterable listWebhooks(ListWebhooksRequest request) { return new Paginator<>( request, @@ -300,7 +292,7 @@ public Iterable listWebhooks(ListWebhooksRequest request) { } public RejectTransitionRequestResponse rejectTransitionRequest( - String name, String version, Stage stage) { + String name, String version, String stage) { return rejectTransitionRequest( new RejectTransitionRequest().setName(name).setVersion(version).setStage(stage)); } @@ -389,17 +381,13 @@ public TestRegistryWebhookResponse testRegistryWebhook(String id) { return testRegistryWebhook(new TestRegistryWebhookRequest().setId(id)); } - /** - * **NOTE:** This endpoint is in Public Preview. - * - *

Tests a registry webhook. - */ + /** **NOTE:** This endpoint is in Public Preview. Tests a registry webhook. */ public TestRegistryWebhookResponse testRegistryWebhook(TestRegistryWebhookRequest request) { return impl.testRegistryWebhook(request); } public TransitionStageResponse transitionStage( - String name, String version, Stage stage, boolean archiveExistingVersions) { + String name, String version, String stage, boolean archiveExistingVersions) { return transitionStage( new TransitionModelVersionStageDatabricks() .setName(name) @@ -410,7 +398,7 @@ public TransitionStageResponse transitionStage( /** * Transition a model version's stage. This is a Databricks workspace version of the [MLflow - * endpoint] that also accepts a comment associated with the transition to be recorded.", + * endpoint] that also accepts a comment associated with the transition to be recorded. * *

[MLflow endpoint]: * https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage @@ -428,22 +416,22 @@ public UpdateCommentResponse updateComment(UpdateComment request) { return impl.updateComment(request); } - public void updateModel(String name) { - updateModel(new UpdateModelRequest().setName(name)); + public UpdateModelResponse updateModel(String name) { + return updateModel(new UpdateModelRequest().setName(name)); } /** Updates a registered model. */ - public void updateModel(UpdateModelRequest request) { - impl.updateModel(request); + public UpdateModelResponse updateModel(UpdateModelRequest request) { + return impl.updateModel(request); } - public void updateModelVersion(String name, String version) { - updateModelVersion(new UpdateModelVersionRequest().setName(name).setVersion(version)); + public UpdateModelVersionResponse updateModelVersion(String name, String version) { + return updateModelVersion(new UpdateModelVersionRequest().setName(name).setVersion(version)); } /** Updates the model version. */ - public void updateModelVersion(UpdateModelVersionRequest request) { - impl.updateModelVersion(request); + public UpdateModelVersionResponse updateModelVersion(UpdateModelVersionRequest request) { + return impl.updateModelVersion(request); } public RegisteredModelPermissions updatePermissions(String registeredModelId) { @@ -459,17 +447,13 @@ public RegisteredModelPermissions updatePermissions(RegisteredModelPermissionsRe return impl.updatePermissions(request); } - public void updateWebhook(String id) { - updateWebhook(new UpdateRegistryWebhook().setId(id)); + public UpdateWebhookResponse updateWebhook(String id) { + return updateWebhook(new UpdateRegistryWebhook().setId(id)); } - /** - * **NOTE:** This endpoint is in Public Preview. - * - *

Updates a registry webhook. - */ - public void updateWebhook(UpdateRegistryWebhook request) { - impl.updateWebhook(request); + /** **NOTE:** This endpoint is in Public Preview. Updates a registry webhook. */ + public UpdateWebhookResponse updateWebhook(UpdateRegistryWebhook request) { + return impl.updateWebhook(request); } public ModelRegistryService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java index 357684e9c..f74777e4c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java @@ -108,7 +108,7 @@ public void deleteComment(DeleteCommentRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteCommentResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -121,7 +121,7 @@ public void deleteModel(DeleteModelRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteModelResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -134,7 +134,7 @@ public void deleteModelTag(DeleteModelTagRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteModelTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -147,7 +147,7 @@ public void deleteModelVersion(DeleteModelVersionRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteModelVersionResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -160,20 +160,21 @@ public void deleteModelVersionTag(DeleteModelVersionTagRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteModelVersionTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public void deleteTransitionRequest(DeleteTransitionRequestRequest request) { + public DeleteTransitionRequestResponse deleteTransitionRequest( + DeleteTransitionRequestRequest request) { String path = "/api/2.0/mlflow/transition-requests/delete"; try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteTransitionRequestResponse.class); + return apiClient.execute(req, DeleteTransitionRequestResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -186,7 +187,7 @@ public void deleteWebhook(DeleteWebhookRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", 
"application/json"); - apiClient.execute(req, DeleteWebhookResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -379,7 +380,7 @@ public void setModelTag(SetModelTagRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, SetModelTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -393,7 +394,7 @@ public void setModelVersionTag(SetModelVersionTagRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, SetModelVersionTagResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -457,28 +458,28 @@ public UpdateCommentResponse updateComment(UpdateComment request) { } @Override - public void updateModel(UpdateModelRequest request) { + public UpdateModelResponse updateModel(UpdateModelRequest request) { String path = "/api/2.0/mlflow/registered-models/update"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateModelResponse.class); + return apiClient.execute(req, UpdateModelResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public void updateModelVersion(UpdateModelVersionRequest request) { + public UpdateModelVersionResponse updateModelVersion(UpdateModelVersionRequest request) { String path = "/api/2.0/mlflow/model-versions/update"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateModelVersionResponse.class); + return apiClient.execute(req, UpdateModelVersionResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -500,14 +501,14 @@ public RegisteredModelPermissions updatePermissions(RegisteredModelPermissionsRe } @Override - public void updateWebhook(UpdateRegistryWebhook request) { + public UpdateWebhookResponse updateWebhook(UpdateRegistryWebhook request) { String path = "/api/2.0/mlflow/registry-webhooks/update"; try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateWebhookResponse.class); + return apiClient.execute(req, UpdateWebhookResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java index ece320439..416afd402 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java @@ -30,9 +30,8 @@ 
ApproveTransitionRequestResponse approveTransitionRequest( CreateCommentResponse createComment(CreateComment createComment); /** - * Creates a new registered model with the name specified in the request body. - * - *
<p>
Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. + * Creates a new registered model with the name specified in the request body. Throws + * `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. */ CreateModelResponse createModel(CreateModelRequest createModelRequest); @@ -44,11 +43,7 @@ CreateModelVersionResponse createModelVersion( CreateTransitionRequestResponse createTransitionRequest( CreateTransitionRequest createTransitionRequest); - /** - * **NOTE**: This endpoint is in Public Preview. - * - *
<p>
Creates a registry webhook. - */ + /** **NOTE:** This endpoint is in Public Preview. Creates a registry webhook. */ CreateWebhookResponse createWebhook(CreateRegistryWebhook createRegistryWebhook); /** Deletes a comment on a model version. */ @@ -67,13 +62,10 @@ CreateTransitionRequestResponse createTransitionRequest( void deleteModelVersionTag(DeleteModelVersionTagRequest deleteModelVersionTagRequest); /** Cancels a model version stage transition request. */ - void deleteTransitionRequest(DeleteTransitionRequestRequest deleteTransitionRequestRequest); + DeleteTransitionRequestResponse deleteTransitionRequest( + DeleteTransitionRequestRequest deleteTransitionRequestRequest); - /** - * **NOTE:** This endpoint is in Public Preview. - * - *
<p>
Deletes a registry webhook. - */ + /** **NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. */ void deleteWebhook(DeleteWebhookRequest deleteWebhookRequest); /** Gets the latest version of a registered model. */ @@ -113,11 +105,7 @@ RegisteredModelPermissions getPermissions( ListTransitionRequestsResponse listTransitionRequests( ListTransitionRequestsRequest listTransitionRequestsRequest); - /** - * **NOTE:** This endpoint is in Public Preview. - * - *
<p>
Lists all registry webhooks. - */ + /** **NOTE:** This endpoint is in Public Preview. Lists all registry webhooks. */ ListRegistryWebhooks listWebhooks(ListWebhooksRequest listWebhooksRequest); /** Rejects a model version stage transition request. */ @@ -147,17 +135,13 @@ SearchModelVersionsResponse searchModelVersions( RegisteredModelPermissions setPermissions( RegisteredModelPermissionsRequest registeredModelPermissionsRequest); - /** - * **NOTE:** This endpoint is in Public Preview. - * - *
<p>
Tests a registry webhook. - */ + /** **NOTE:** This endpoint is in Public Preview. Tests a registry webhook. */ TestRegistryWebhookResponse testRegistryWebhook( TestRegistryWebhookRequest testRegistryWebhookRequest); /** * Transition a model version's stage. This is a Databricks workspace version of the [MLflow - * endpoint] that also accepts a comment associated with the transition to be recorded.", + * endpoint] that also accepts a comment associated with the transition to be recorded. * *
<p>
[MLflow endpoint]: * https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage @@ -169,10 +153,11 @@ TransitionStageResponse transitionStage( UpdateCommentResponse updateComment(UpdateComment updateComment); /** Updates a registered model. */ - void updateModel(UpdateModelRequest updateModelRequest); + UpdateModelResponse updateModel(UpdateModelRequest updateModelRequest); /** Updates the model version. */ - void updateModelVersion(UpdateModelVersionRequest updateModelVersionRequest); + UpdateModelVersionResponse updateModelVersion( + UpdateModelVersionRequest updateModelVersionRequest); /** * Updates the permissions on a registered model. Registered models can inherit permissions from @@ -181,10 +166,6 @@ TransitionStageResponse transitionStage( RegisteredModelPermissions updatePermissions( RegisteredModelPermissionsRequest registeredModelPermissionsRequest); - /** - * **NOTE:** This endpoint is in Public Preview. - * - *
<p>
Updates a registry webhook. - */ - void updateWebhook(UpdateRegistryWebhook updateRegistryWebhook); + /** **NOTE:** This endpoint is in Public Preview. Updates a registry webhook. */ + UpdateWebhookResponse updateWebhook(UpdateRegistryWebhook updateRegistryWebhook); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java index e96f3ad84..1731145a3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Tag for a registered model */ @Generated public class ModelTag { /** The tag key. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java index 89602be70..b11b61291 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java @@ -14,24 +14,25 @@ public class ModelVersionDatabricks { @JsonProperty("creation_timestamp") private Long creationTimestamp; - /** - * Stage of the model version. Valid values are: - * - *
<p>* `None`: The initial stage of a model version. - * - *
<p>* `Staging`: Staging or pre-production stage. - * - *
<p>* `Production`: Production stage. - * - *
<p>
* `Archived`: Archived stage. - */ + /** */ @JsonProperty("current_stage") - private Stage currentStage; + private String currentStage; /** User-specified description for the object. */ @JsonProperty("description") private String description; + /** + * Email Subscription Status: This is the subscription status of the user to the model version + * Users get subscribed by interacting with the model version. + */ + @JsonProperty("email_subscription_status") + private RegistryEmailSubscriptionType emailSubscriptionStatus; + + /** Feature lineage of `model_version`. */ + @JsonProperty("feature_list") + private FeatureList featureList; + /** Time of the object at last update, as a Unix timestamp in milliseconds. */ @JsonProperty("last_updated_timestamp") private Long lastUpdatedTimestamp; @@ -41,9 +42,13 @@ public class ModelVersionDatabricks { private String name; /** - * Permission level of the requesting user on the object. For what is allowed at each level, see - * [MLflow Model permissions](..). + * Open requests for this `model_versions`. Gap in sequence number is intentional and is done in + * order to match field sequence numbers of `ModelVersion` proto message */ + @JsonProperty("open_requests") + private Collection openRequests; + + /** */ @JsonProperty("permission_level") private PermissionLevel permissionLevel; @@ -66,14 +71,7 @@ public class ModelVersionDatabricks { @JsonProperty("source") private String source; - /** - * The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to - * register a new model version is pending as server performs background tasks. - * - *
<p>* `FAILED_REGISTRATION`: Request to register a new model version has failed. - * - *
<p>
* `READY`: Model version is ready for use. - */ + /** */ @JsonProperty("status") private Status status; @@ -102,12 +100,12 @@ public Long getCreationTimestamp() { return creationTimestamp; } - public ModelVersionDatabricks setCurrentStage(Stage currentStage) { + public ModelVersionDatabricks setCurrentStage(String currentStage) { this.currentStage = currentStage; return this; } - public Stage getCurrentStage() { + public String getCurrentStage() { return currentStage; } @@ -120,6 +118,25 @@ public String getDescription() { return description; } + public ModelVersionDatabricks setEmailSubscriptionStatus( + RegistryEmailSubscriptionType emailSubscriptionStatus) { + this.emailSubscriptionStatus = emailSubscriptionStatus; + return this; + } + + public RegistryEmailSubscriptionType getEmailSubscriptionStatus() { + return emailSubscriptionStatus; + } + + public ModelVersionDatabricks setFeatureList(FeatureList featureList) { + this.featureList = featureList; + return this; + } + + public FeatureList getFeatureList() { + return featureList; + } + public ModelVersionDatabricks setLastUpdatedTimestamp(Long lastUpdatedTimestamp) { this.lastUpdatedTimestamp = lastUpdatedTimestamp; return this; @@ -138,6 +155,15 @@ public String getName() { return name; } + public ModelVersionDatabricks setOpenRequests(Collection openRequests) { + this.openRequests = openRequests; + return this; + } + + public Collection getOpenRequests() { + return openRequests; + } + public ModelVersionDatabricks setPermissionLevel(PermissionLevel permissionLevel) { this.permissionLevel = permissionLevel; return this; @@ -227,8 +253,11 @@ public boolean equals(Object o) { return Objects.equals(creationTimestamp, that.creationTimestamp) && Objects.equals(currentStage, that.currentStage) && Objects.equals(description, that.description) + && Objects.equals(emailSubscriptionStatus, that.emailSubscriptionStatus) + && Objects.equals(featureList, that.featureList) && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp) && Objects.equals(name, that.name) + && Objects.equals(openRequests, that.openRequests) && Objects.equals(permissionLevel, that.permissionLevel) && Objects.equals(runId, that.runId) && Objects.equals(runLink, that.runLink) @@ -246,8 +275,11 @@ public int hashCode() { creationTimestamp, currentStage, description, + emailSubscriptionStatus, + featureList, lastUpdatedTimestamp, name, + openRequests, permissionLevel, runId, runLink, @@ -265,8 +297,11 @@ public String toString() { .add("creationTimestamp", creationTimestamp) .add("currentStage", currentStage) .add("description", description) + .add("emailSubscriptionStatus", emailSubscriptionStatus) + .add("featureList", featureList) .add("lastUpdatedTimestamp", lastUpdatedTimestamp) .add("name", name) + .add("openRequests", openRequests) .add("permissionLevel", permissionLevel) .add("runId", runId) .add("runLink", runLink) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionStatus.java index e6a360cd0..d98ef47b1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionStatus.java @@ -4,10 +4,18 @@ import com.databricks.sdk.support.Generated; -/** Current status of `model_version` */ +/** + * The status of the model version. 
Valid values are: * `PENDING_REGISTRATION`: Request to register + * a new model version is pending as server performs background tasks. + * + *
<p>* `FAILED_REGISTRATION`: Request to register a new model version has failed. + * + *
<p>
* `READY`: Model version is ready for use. + */ @Generated public enum ModelVersionStatus { - FAILED_REGISTRATION, - PENDING_REGISTRATION, - READY, + FAILED_REGISTRATION, // Request to register a new model version has failed. + PENDING_REGISTRATION, // Request to register a new model version is pending as server performs + // background tasks. + READY, // Model version is ready for use. } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java index 1ff6988ed..d4aa69a14 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java @@ -10,6 +10,7 @@ */ @Generated public enum PermissionLevel { + CAN_CREATE_REGISTERED_MODEL, CAN_EDIT, CAN_MANAGE, CAN_MANAGE_PRODUCTION_VERSIONS, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java index 6444b7a8a..16874b850 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java @@ -13,10 +13,7 @@ public class PublishSpec { @JsonProperty("online_store") private String onlineStore; - /** - * The full three-part (catalog, schema, table) name of the online table. Auto-generated if not - * specified. - */ + /** The full three-part (catalog, schema, table) name of the online table. */ @JsonProperty("online_table_name") private String onlineTableName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java index ec6ad3f55..586d99242 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java @@ -13,7 +13,7 @@ public class RegisteredModelAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private RegisteredModelPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java index 3bc0130f2..034aeff12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java @@ -18,7 +18,7 @@ public class RegisteredModelPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private RegisteredModelPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java index b77b306d4..b88e1159c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java @@ -13,7 +13,7 @@ public class RegisteredModelPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private RegisteredModelPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryEmailSubscriptionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryEmailSubscriptionType.java new file mode 100755 index 000000000..57083862e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryEmailSubscriptionType.java @@ -0,0 +1,19 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; + +/** + * .. note:: Experimental: This entity may change or be removed in a future release without warning. + * Email subscription types for registry notifications: - `ALL_EVENTS`: Subscribed to all events. - + * `DEFAULT`: Default subscription type. - `SUBSCRIBED`: Subscribed to notifications. - + * `UNSUBSCRIBED`: Not subscribed to notifications. + */ +@Generated +public enum RegistryEmailSubscriptionType { + ALL_EVENTS, // Subscribed to all events. + DEFAULT, // Default subscription type. + SUBSCRIBED, // Subscribed to notifications. + UNSUBSCRIBED, // Not subscribed to notifications. +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java index 10fcb66c0..96998a77d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java @@ -72,15 +72,7 @@ public class RegistryWebhook { @JsonProperty("model_name") private String modelName; - /** - * Enable or disable triggering the webhook, or put the webhook into test mode. The default is - * `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - * - *
<p>* `DISABLED`: Webhook is not triggered. - * - *
<p>
* `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a - * real event. - */ + /** */ @JsonProperty("status") private RegistryWebhookStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java index c781e08a7..eabdba926 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Details required to identify and reject a model version stage transition request. */ @Generated public class RejectTransitionRequest { /** User-provided comment on the action. */ @@ -29,7 +30,7 @@ public class RejectTransitionRequest { *
<p>
* `Archived`: Archived stage. */ @JsonProperty("stage") - private Stage stage; + private String stage; /** Version of the model. */ @JsonProperty("version") @@ -53,12 +54,12 @@ public String getName() { return name; } - public RejectTransitionRequest setStage(Stage stage) { + public RejectTransitionRequest setStage(String stage) { this.stage = stage; return this; } - public Stage getStage() { + public String getStage() { return stage; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java index 8a568ffba..94daacea7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java @@ -9,7 +9,7 @@ @Generated public class RejectTransitionRequestResponse { - /** Activity recorded for the action. */ + /** New activity generated as a result of this operation. */ @JsonProperty("activity") private Activity activity; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java deleted file mode 100755 index eb0e4f4e3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class RestoreExperimentResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(RestoreExperimentResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java deleted file mode 100755 index 3fa8ef75f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class RestoreRunResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(RestoreRunResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java deleted file mode 100755 index 2f62954b2..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class SetExperimentTagResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(SetExperimentTagResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java deleted file mode 100755 index 924dacc20..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class SetLoggedModelTagsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(SetLoggedModelTagsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java deleted file mode 100755 index a741183c3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class SetModelTagResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(SetModelTagResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java deleted file mode 100755 index 26d6245bf..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class SetModelVersionTagResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(SetModelVersionTagResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java deleted file mode 100755 index 89d485ce0..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class SetTagResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(SetTagResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Stage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Stage.java deleted file mode 100755 index 7810e70c6..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Stage.java +++ /dev/null @@ -1,32 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * Stage of the model version. Valid values are: - * - *
<p>* `None`: The initial stage of a model version. - * - *
<p>* `Staging`: Staging or pre-production stage. - * - *
<p>* `Production`: Production stage. - * - *
<p>
* `Archived`: Archived stage. - */ -@Generated -public enum Stage { - @JsonProperty("Archived") - ARCHIVED, // Archived stage. - - @JsonProperty("None") - NONE, // The initial stage of a model version. - - @JsonProperty("Production") - PRODUCTION, // Production stage. - - @JsonProperty("Staging") - STAGING, // Staging or pre-production stage. -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java deleted file mode 100755 index 16729c8e2..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -/** Test webhook response object. */ -@Generated -public class TestRegistryWebhook { - /** Body of the response from the webhook URL */ - @JsonProperty("body") - private String body; - - /** Status code returned by the webhook URL */ - @JsonProperty("status_code") - private Long statusCode; - - public TestRegistryWebhook setBody(String body) { - this.body = body; - return this; - } - - public String getBody() { - return body; - } - - public TestRegistryWebhook setStatusCode(Long statusCode) { - this.statusCode = statusCode; - return this; - } - - public Long getStatusCode() { - return statusCode; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TestRegistryWebhook that = (TestRegistryWebhook) o; - return Objects.equals(body, that.body) && Objects.equals(statusCode, that.statusCode); - } - - @Override - public int hashCode() { - return Objects.hash(body, statusCode); - } - - @Override - public String toString() { - return new ToStringer(TestRegistryWebhook.class) - .add("body", body) - .add("statusCode", statusCode) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java index be3a7d261..22e497c89 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Details required to test a registry webhook. */ @Generated public class TestRegistryWebhookRequest { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java index f74b558bb..cf3bda13c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java @@ -9,17 +9,30 @@ @Generated public class TestRegistryWebhookResponse { - /** Test webhook response object. 
*/ - @JsonProperty("webhook") - private TestRegistryWebhook webhook; + /** Body of the response from the webhook URL */ + @JsonProperty("body") + private String body; - public TestRegistryWebhookResponse setWebhook(TestRegistryWebhook webhook) { - this.webhook = webhook; + /** Status code returned by the webhook URL */ + @JsonProperty("status_code") + private Long statusCode; + + public TestRegistryWebhookResponse setBody(String body) { + this.body = body; + return this; + } + + public String getBody() { + return body; + } + + public TestRegistryWebhookResponse setStatusCode(Long statusCode) { + this.statusCode = statusCode; return this; } - public TestRegistryWebhook getWebhook() { - return webhook; + public Long getStatusCode() { + return statusCode; } @Override @@ -27,16 +40,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TestRegistryWebhookResponse that = (TestRegistryWebhookResponse) o; - return Objects.equals(webhook, that.webhook); + return Objects.equals(body, that.body) && Objects.equals(statusCode, that.statusCode); } @Override public int hashCode() { - return Objects.hash(webhook); + return Objects.hash(body, statusCode); } @Override public String toString() { - return new ToStringer(TestRegistryWebhookResponse.class).add("webhook", webhook).toString(); + return new ToStringer(TestRegistryWebhookResponse.class) + .add("body", body) + .add("statusCode", statusCode) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java index 0e69b1bf3..01b3c0fb8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Details required to transition a model version's stage. */ @Generated public class TransitionModelVersionStageDatabricks { /** Specifies whether to archive all current model versions in the target stage. */ @@ -33,7 +34,7 @@ public class TransitionModelVersionStageDatabricks { *
<p>
* `Archived`: Archived stage. */ @JsonProperty("stage") - private Stage stage; + private String stage; /** Version of the model. */ @JsonProperty("version") @@ -67,12 +68,12 @@ public String getName() { return name; } - public TransitionModelVersionStageDatabricks setStage(Stage stage) { + public TransitionModelVersionStageDatabricks setStage(String stage) { this.stage = stage; return this; } - public Stage getStage() { + public String getStage() { return stage; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java index ebfb7c60e..7296f391a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java @@ -8,14 +8,17 @@ import java.util.Collection; import java.util.Objects; -/** Transition request details. */ +/** + * For activities, this contains the activity recorded for the action. For comments, this contains + * the comment details. For transition requests, this contains the transition request details. + */ @Generated public class TransitionRequest { /** Array of actions on the activity allowed for the current viewer. */ @JsonProperty("available_actions") private Collection availableActions; - /** User-provided comment associated with the transition request. */ + /** User-provided comment associated with the activity, comment, or transition request. */ @JsonProperty("comment") private String comment; @@ -35,7 +38,7 @@ public class TransitionRequest { *
<p>
* `Archived`: Archived stage. */ @JsonProperty("to_stage") - private Stage toStage; + private String toStage; /** The username of the user that created the object. */ @JsonProperty("user_id") @@ -68,12 +71,12 @@ public Long getCreationTimestamp() { return creationTimestamp; } - public TransitionRequest setToStage(Stage toStage) { + public TransitionRequest setToStage(String toStage) { this.toStage = toStage; return this; } - public Stage getToStage() { + public String getToStage() { return toStage; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java index 22e210045..7a488f16b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java @@ -9,17 +9,18 @@ @Generated public class TransitionStageResponse { - /** */ - @JsonProperty("model_version") - private ModelVersionDatabricks modelVersion; + /** Updated model version */ + @JsonProperty("model_version_databricks") + private ModelVersionDatabricks modelVersionDatabricks; - public TransitionStageResponse setModelVersion(ModelVersionDatabricks modelVersion) { - this.modelVersion = modelVersion; + public TransitionStageResponse setModelVersionDatabricks( + ModelVersionDatabricks modelVersionDatabricks) { + this.modelVersionDatabricks = modelVersionDatabricks; return this; } - public ModelVersionDatabricks getModelVersion() { - return modelVersion; + public ModelVersionDatabricks getModelVersionDatabricks() { + return modelVersionDatabricks; } @Override @@ -27,18 +28,18 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TransitionStageResponse that = (TransitionStageResponse) o; - return Objects.equals(modelVersion, that.modelVersion); + return Objects.equals(modelVersionDatabricks, that.modelVersionDatabricks); } @Override public int hashCode() { - return Objects.hash(modelVersion); + return Objects.hash(modelVersionDatabricks); } @Override public String toString() { return new ToStringer(TransitionStageResponse.class) - .add("modelVersion", modelVersion) + .add("modelVersionDatabricks", modelVersionDatabricks) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java index 22cec7325..b51ece0fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Details required to edit a comment on a model version. */ @Generated public class UpdateComment { /** User-provided comment on the action. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java index e453a67b6..ebfcdafba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java @@ -9,7 +9,7 @@ @Generated public class UpdateCommentResponse { - /** Comment details. 
*/ + /** Updated comment object */ @JsonProperty("comment") private CommentObject comment; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java deleted file mode 100755 index 4ee79bb74..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateExperimentResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateExperimentResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java index a651bd1cf..3e8c0091e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java @@ -14,7 +14,7 @@ public class UpdateFeatureTagRequest { /** */ @JsonIgnore private String featureName; - /** Represents a tag on a feature in a feature table. */ + /** */ @JsonProperty("feature_tag") private FeatureTag featureTag; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java index 759e6d5e7..bbf1fc01d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java @@ -4,25 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; @Generated public class UpdateModelResponse { + /** */ + @JsonProperty("registered_model") + private Model registeredModel; + + public UpdateModelResponse setRegisteredModel(Model registeredModel) { + this.registeredModel = registeredModel; + return this; + } + + public Model getRegisteredModel() { + return registeredModel; + } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - return true; + UpdateModelResponse that = (UpdateModelResponse) o; + return Objects.equals(registeredModel, that.registeredModel); } @Override public int hashCode() { - return Objects.hash(); + return Objects.hash(registeredModel); } @Override public String toString() { - return new ToStringer(UpdateModelResponse.class).toString(); + return new ToStringer(UpdateModelResponse.class) + .add("registeredModel", registeredModel) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java index acdc9d1cc..524f42913 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java @@ -4,25 +4,41 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; @Generated public class UpdateModelVersionResponse { + /** Return new version number generated for this model in registry. */ + @JsonProperty("model_version") + private ModelVersion modelVersion; + + public UpdateModelVersionResponse setModelVersion(ModelVersion modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public ModelVersion getModelVersion() { + return modelVersion; + } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - return true; + UpdateModelVersionResponse that = (UpdateModelVersionResponse) o; + return Objects.equals(modelVersion, that.modelVersion); } @Override public int hashCode() { - return Objects.hash(); + return Objects.hash(modelVersion); } @Override public String toString() { - return new ToStringer(UpdateModelVersionResponse.class).toString(); + return new ToStringer(UpdateModelVersionResponse.class) + .add("modelVersion", modelVersion) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java index d6daac367..09868bdf0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java @@ -14,7 +14,7 @@ public class UpdateOnlineStoreRequest { /** The name of the online store. This is the unique identifier for the online store. */ @JsonIgnore private String name; - /** An OnlineStore is a logical database instance that stores and serves features online. */ + /** Online store to update. */ @JsonProperty("online_store") private OnlineStore onlineStore; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java index 285496290..01f993e99 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java @@ -8,6 +8,10 @@ import java.util.Collection; import java.util.Objects; +/** + * Details required to update a registry webhook. Only the fields that need to be updated should be + * specified, and both `http_url_spec` and `job_spec` should not be specified in the same request. + */ @Generated public class UpdateRegistryWebhook { /** User-specified description for the webhook. */ @@ -60,15 +64,7 @@ public class UpdateRegistryWebhook { @JsonProperty("job_spec") private JobSpec jobSpec; - /** - * Enable or disable triggering the webhook, or put the webhook into test mode. The default is - * `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - * - *
<p>* `DISABLED`: Webhook is not triggered. - * - *
<p>
* `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a - * real event. - */ + /** */ @JsonProperty("status") private RegistryWebhookStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java index 64b5de737..1003dd8f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java @@ -4,25 +4,39 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; @Generated public class UpdateWebhookResponse { + /** */ + @JsonProperty("webhook") + private RegistryWebhook webhook; + + public UpdateWebhookResponse setWebhook(RegistryWebhook webhook) { + this.webhook = webhook; + return this; + } + + public RegistryWebhook getWebhook() { + return webhook; + } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - return true; + UpdateWebhookResponse that = (UpdateWebhookResponse) o; + return Objects.equals(webhook, that.webhook); } @Override public int hashCode() { - return Objects.hash(); + return Objects.hash(webhook); } @Override public String toString() { - return new ToStringer(UpdateWebhookResponse.class).toString(); + return new ToStringer(UpdateWebhookResponse.class).add("webhook", webhook).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java index e61b618f6..193bd3d5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java @@ -41,7 +41,7 @@ public void delete(DeleteAccountFederationPolicyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java index 3aa7acba1..b15f132b7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java @@ -42,7 +42,7 @@ public void delete(DeleteCustomAppIntegrationRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteCustomAppIntegrationOutput.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -90,7 +90,7 @@ public void update(UpdateCustomAppIntegration request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); 
- apiClient.execute(req, UpdateCustomAppIntegrationOutput.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java deleted file mode 100755 index 9739ec0e8..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.oauth2; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteCustomAppIntegrationOutput { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteCustomAppIntegrationOutput.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java deleted file mode 100755 index cbc352c7d..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.oauth2; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeletePublishedAppIntegrationOutput { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeletePublishedAppIntegrationOutput.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java deleted file mode 100755 index 6c5276a49..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.oauth2; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java index 18db0601f..842c5bea1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java @@ -29,7 +29,7 @@ public class FederationPolicy { @JsonProperty("name") private String name; - /** Specifies the policy to use for validating OIDC claims in your federated tokens. */ + /** */ @JsonProperty("oidc_policy") private OidcFederationPolicy oidcPolicy; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java index e2592779a..84fba336f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java @@ -43,7 +43,7 @@ public void delete(DeletePublishedAppIntegrationRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeletePublishedAppIntegrationOutput.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -92,7 +92,7 @@ public void update(UpdatePublishedAppIntegration request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdatePublishedAppIntegrationOutput.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java index 322518bc9..3d1eb0349 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java @@ -45,7 +45,7 @@ public void delete(DeleteServicePrincipalFederationPolicyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java index 3579430b5..75cee9e08 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java @@ -44,7 +44,7 @@ public void delete(DeleteServicePrincipalSecretRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java deleted file mode 100755 index 45f262320..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.oauth2; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateCustomAppIntegrationOutput { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateCustomAppIntegrationOutput.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java deleted file mode 100755 index efa8717d1..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.oauth2; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdatePublishedAppIntegrationOutput { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdatePublishedAppIntegrationOutput.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index 85f4d3dbc..167282b32 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -115,14 +115,7 @@ public class CreatePipeline { @JsonProperty("root_path") private String rootPath; - /** - * Write-only setting, available only in Create/Update calls. 
Specifies the user or service - * principal that the pipeline runs as. If not specified, the pipeline runs as the user who - * created the pipeline. - * - *

Only `user_name` or `service_principal_name` can be specified. If both are specified, an - * error is thrown. - */ + /** */ @JsonProperty("run_as") private RunAs runAs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java deleted file mode 100755 index 103293d0f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.pipelines; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeletePipelineResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeletePipelineResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index add58b4bd..5b8e3c4f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -124,14 +124,7 @@ public class EditPipeline { @JsonProperty("root_path") private String rootPath; - /** - * Write-only setting, available only in Create/Update calls. Specifies the user or service - * principal that the pipeline runs as. If not specified, the pipeline runs as the user who - * created the pipeline. - * - *

Only `user_name` or `service_principal_name` can be specified. If both are specified, an - * error is thrown. - */ + /** */ @JsonProperty("run_as") private RunAs runAs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java deleted file mode 100755 index 2bb8b38a5..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.pipelines; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class EditPipelineResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(EditPipelineResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java index 0654879e3..ff158fa65 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java @@ -46,6 +46,14 @@ public class GetPipelineResponse { @JsonProperty("pipeline_id") private String pipelineId; + /** + * The user or service principal that the pipeline runs as, if specified in the request. This + * field indicates the explicit configuration of `run_as` for the pipeline. To find the value in + * all cases, explicit or implicit, use `run_as_user_name`. + */ + @JsonProperty("run_as") + private RunAs runAs; + /** Username of the user that the pipeline will run on behalf of. 
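For context on the new `run_as` field added to `GetPipelineResponse` above: it surfaces the explicit run-as configuration, while `run_as_user_name` continues to report the effective identity in all cases. A minimal read sketch, assuming a configured `WorkspaceClient` and the usual `pipelines().get(pipelineId)` convenience accessor; the pipeline ID is a placeholder:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.pipelines.GetPipelineResponse;
import com.databricks.sdk.service.pipelines.RunAs;

public class PipelineRunAsCheck {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // picks up auth from the environment
    GetPipelineResponse pipeline = w.pipelines().get("0123-456789-abcdef"); // placeholder pipeline ID
    RunAs explicit = pipeline.getRunAs();            // null when run_as was never set explicitly
    String effective = pipeline.getRunAsUserName();  // effective identity, explicit or implicit
    System.out.println("explicit run_as=" + explicit + ", effective=" + effective);
  }
}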
*/ @JsonProperty("run_as_user_name") private String runAsUserName; @@ -139,6 +147,15 @@ public String getPipelineId() { return pipelineId; } + public GetPipelineResponse setRunAs(RunAs runAs) { + this.runAs = runAs; + return this; + } + + public RunAs getRunAs() { + return runAs; + } + public GetPipelineResponse setRunAsUserName(String runAsUserName) { this.runAsUserName = runAsUserName; return this; @@ -180,6 +197,7 @@ public boolean equals(Object o) { && Objects.equals(latestUpdates, that.latestUpdates) && Objects.equals(name, that.name) && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(runAs, that.runAs) && Objects.equals(runAsUserName, that.runAsUserName) && Objects.equals(spec, that.spec) && Objects.equals(state, that.state); @@ -197,6 +215,7 @@ public int hashCode() { latestUpdates, name, pipelineId, + runAs, runAsUserName, spec, state); @@ -214,6 +233,7 @@ public String toString() { .add("latestUpdates", latestUpdates) .add("name", name) .add("pipelineId", pipelineId) + .add("runAs", runAs) .add("runAsUserName", runAsUserName) .add("spec", spec) .add("state", state) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java index 272a8235d..bf68ec227 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java @@ -6,6 +6,7 @@ @Generated public enum IngestionSourceType { + BIGQUERY, DYNAMICS365, GA4_RAW_DATA, MANAGED_POSTGRESQL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java index 27b567277..4aa4ca42f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java @@ -13,7 +13,7 @@ public class PipelineAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private PipelinePermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java index 3911806c8..880e47d08 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java @@ -18,7 +18,7 @@ public class PipelinePermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private PipelinePermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescription.java index cbe547625..33c50041e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescription.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermissionsDescription.java @@ -13,7 +13,7 @@ public class PipelinePermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private PipelinePermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfo.java index dafa63362..c454b4208 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineStateInfo.java @@ -41,7 +41,7 @@ public class PipelineStateInfo { @JsonProperty("run_as_user_name") private String runAsUserName; - /** The pipeline state. */ + /** */ @JsonProperty("state") private PipelineState state; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java index 91077477d..0fdad690a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java @@ -37,7 +37,7 @@ public void delete(DeletePipelineRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeletePipelineResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -174,7 +174,7 @@ public void stop(StopRequest request) { Request req = new Request("POST", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, StopPipelineResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -188,7 +188,7 @@ public void update(EditPipeline request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, EditPipelineResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java index 687fa8be1..14df875b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java @@ -11,7 +11,7 @@ @Generated public class StartUpdate { - /** What triggered this update. 
*/ + /** */ @JsonProperty("cause") private StartUpdateCause cause; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java deleted file mode 100755 index 50dfb4c77..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.pipelines; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class StopPipelineResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(StopPipelineResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java index 5149ef721..6e7b2e749 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfo.java @@ -13,7 +13,7 @@ public class UpdateStateInfo { @JsonProperty("creation_time") private String creationTime; - /** The update state. */ + /** */ @JsonProperty("state") private UpdateStateInfoState state; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java index 31dea89b6..ca8290c0b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CloudResourceContainer.java @@ -10,7 +10,7 @@ /** The general workspace configurations that are specific to cloud providers. */ @Generated public class CloudResourceContainer { - /** The general workspace configurations that are specific to Google Cloud. */ + /** */ @JsonProperty("gcp") private CustomerFacingGcpCloudResourceContainer gcp; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java index 0c7ef5613..9dff47e10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateNetworkRequest.java @@ -10,10 +10,7 @@ @Generated public class CreateNetworkRequest { - /** - * The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - * secondary IP ranges). - */ + /** */ @JsonProperty("gcp_network_info") private GcpNetworkInfo gcpNetworkInfo; @@ -35,12 +32,7 @@ public class CreateNetworkRequest { @JsonProperty("subnet_ids") private Collection subnetIds; - /** - * If specified, contains the VPC endpoints used to allow cluster communication from this VPC over - * [AWS PrivateLink]. - * - *

[AWS PrivateLink]: https://aws.amazon.com/privatelink/ - */ + /** */ @JsonProperty("vpc_endpoints") private NetworkVpcEndpoints vpcEndpoints; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java index fdc409d47..17bbcebc8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java @@ -9,7 +9,7 @@ @Generated public class CreateStorageConfigurationRequest { - /** Root S3 bucket information. */ + /** */ @JsonProperty("root_bucket_info") private RootBucketInfo rootBucketInfo; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java index 1d87036a1..00ed46856 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java @@ -13,7 +13,7 @@ public class CreateVpcEndpointRequest { @JsonProperty("aws_vpc_endpoint_id") private String awsVpcEndpointId; - /** The Google Cloud specific information for this Private Service Connect endpoint. */ + /** */ @JsonProperty("gcp_vpc_endpoint_info") private GcpVpcEndpointInfo gcpVpcEndpointInfo; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java index 2cc6ec80a..31d107a91 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java @@ -21,7 +21,7 @@ public class CreateWorkspaceRequest { @JsonProperty("cloud") private String cloud; - /** The general workspace configurations that are specific to cloud providers. */ + /** */ @JsonProperty("cloud_resource_container") private CloudResourceContainer cloudResourceContainer; @@ -65,31 +65,11 @@ public class CreateWorkspaceRequest { @JsonProperty("deployment_name") private String deploymentName; - /** - * The network settings for the workspace. The configurations are only for Databricks-managed - * VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the - * IP range configurations must be mutually exclusive. An attempt to create a workspace fails if - * Databricks detects an IP range overlap. - * - *

Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, - * and all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, - * `100.64.0.0/10`, `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - * - *

The sizes of these IP ranges affect the maximum number of nodes for the workspace. - * - *

**Important**: Confirm the IP ranges used by your Databricks workspace before creating the - * workspace. You cannot change them after your workspace is deployed. If the IP address ranges - * for your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to - * fail. To determine the address range sizes that you need, Databricks provides a calculator as a - * Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - * - *

[calculate subnet sizes for a new workspace]: - * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html - */ + /** */ @JsonProperty("gcp_managed_network_config") private GcpManagedNetworkConfig gcpManagedNetworkConfig; - /** The configurations for the GKE cluster of a Databricks workspace. */ + /** */ @JsonProperty("gke_config") private GkeConfig gkeConfig; @@ -117,11 +97,7 @@ public class CreateWorkspaceRequest { @JsonProperty("network_id") private String networkId; - /** - * The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - * - *

[AWS Pricing]: https://databricks.com/product/aws-pricing - */ + /** */ @JsonProperty("pricing_tier") private PricingTier pricingTier; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java index 581cd163a..4aca2d8bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java @@ -42,7 +42,7 @@ public void delete(DeleteCredentialRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java deleted file mode 100755 index 053e59bd5..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.provisioning; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java index 8e103d747..20424aec0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java @@ -43,7 +43,7 @@ public void delete(DeleteEncryptionKeyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java index 428d49863..b149a4d48 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java @@ -22,10 +22,7 @@ public class Network { @JsonProperty("error_messages") private Collection errorMessages; - /** - * The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - * secondary IP ranges). 
- */ + /** */ @JsonProperty("gcp_network_info") private GcpNetworkInfo gcpNetworkInfo; @@ -45,12 +42,7 @@ public class Network { @JsonProperty("subnet_ids") private Collection subnetIds; - /** - * If specified, contains the VPC endpoints used to allow cluster communication from this VPC over - * [AWS PrivateLink]. - * - *

[AWS PrivateLink]: https://aws.amazon.com/privatelink/ - */ + /** */ @JsonProperty("vpc_endpoints") private NetworkVpcEndpoints vpcEndpoints; @@ -61,10 +53,7 @@ public class Network { @JsonProperty("vpc_id") private String vpcId; - /** - * The status of this network configuration object in terms of its use in a workspace: * - * `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned. - */ + /** */ @JsonProperty("vpc_status") private VpcStatus vpcStatus; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java index 8d00d7c3e..45b0dc60a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java @@ -13,10 +13,7 @@ public class NetworkHealth { @JsonProperty("error_message") private String errorMessage; - /** - * The AWS resource associated with this error: credentials, VPC, subnet, security group, or - * network ACL. - */ + /** */ @JsonProperty("error_type") private ErrorType errorType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java index 8db624574..8e7d14a8a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java @@ -13,7 +13,7 @@ public class NetworkWarning { @JsonProperty("warning_message") private String warningMessage; - /** The AWS resource associated with this warning: a subnet or a security group. 
*/ + /** */ @JsonProperty("warning_type") private WarningType warningType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java index cdd5f594b..5a6c8d710 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java @@ -41,7 +41,7 @@ public void delete(DeleteNetworkRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java index 5fd0babfc..948896927 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java @@ -43,7 +43,7 @@ public void delete(DeletePrivateAccesRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -86,7 +86,7 @@ public void replace(UpsertPrivateAccessSettingsRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ReplaceResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java index 7be0b8b37..03c466d78 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java @@ -18,13 +18,7 @@ public class PrivateAccessSettings { @JsonProperty("allowed_vpc_endpoint_ids") private Collection allowedVpcEndpointIds; - /** - * The private access level controls which VPC endpoints can connect to the UI or API of any - * workspace that attaches this private access settings object. * `ACCOUNT` level access (the - * default) allows only VPC endpoints that are registered in your Databricks account connect to - * your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your - * workspace. For details, see `allowed_vpc_endpoint_ids`. 
- */ + /** */ @JsonProperty("private_access_level") private PrivateAccessLevel privateAccessLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java deleted file mode 100755 index de0d3423e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.provisioning; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class ReplaceResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(ReplaceResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java index 59ce78074..a20cf9f3b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java @@ -17,7 +17,7 @@ public class StorageConfiguration { @JsonProperty("creation_time") private Long creationTime; - /** Root S3 bucket information. */ + /** */ @JsonProperty("root_bucket_info") private RootBucketInfo rootBucketInfo; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java index 3489de525..6be79f6d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java @@ -43,7 +43,7 @@ public void delete(DeleteStorageRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java deleted file mode 100755 index 0ffcdf69e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.provisioning; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java index ed1ea553d..7efe220b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java @@ -28,13 +28,7 @@ public class UpsertPrivateAccessSettingsRequest { @JsonProperty("allowed_vpc_endpoint_ids") private Collection allowedVpcEndpointIds; - /** - * The private access level controls which VPC endpoints can connect to the UI or API of any - * workspace that attaches this private access settings object. * `ACCOUNT` level access (the - * default) allows only VPC endpoints that are registered in your Databricks account connect to - * your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your - * workspace. For details, see `allowed_vpc_endpoint_ids`. - */ + /** */ @JsonProperty("private_access_level") private PrivateAccessLevel privateAccessLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java index 11e9be7b1..82fe9b177 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java @@ -33,7 +33,7 @@ public class VpcEndpoint { @JsonProperty("aws_vpc_endpoint_id") private String awsVpcEndpointId; - /** The Google Cloud specific information for this Private Service Connect endpoint. */ + /** */ @JsonProperty("gcp_vpc_endpoint_info") private GcpVpcEndpointInfo gcpVpcEndpointInfo; @@ -51,12 +51,7 @@ public class VpcEndpoint { @JsonProperty("state") private String state; - /** - * This enumeration represents the type of Databricks VPC [endpoint service] that was used when - * creating this VPC endpoint. - * - *

[endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html - */ + /** */ @JsonProperty("use_case") private EndpointUseCase useCase; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java index 68a709bc6..c6141bb98 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java @@ -42,7 +42,7 @@ public void delete(DeleteVpcEndpointRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java index 4d6b61c9d..ea983a894 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java @@ -26,7 +26,7 @@ public class Workspace { @JsonProperty("cloud") private String cloud; - /** The general workspace configurations that are specific to cloud providers. */ + /** */ @JsonProperty("cloud_resource_container") private CloudResourceContainer cloudResourceContainer; @@ -62,31 +62,11 @@ public class Workspace { @JsonProperty("external_customer_info") private ExternalCustomerInfo externalCustomerInfo; - /** - * The network settings for the workspace. The configurations are only for Databricks-managed - * VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the - * IP range configurations must be mutually exclusive. An attempt to create a workspace fails if - * Databricks detects an IP range overlap. - * - *

Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, - * and all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, - * `100.64.0.0/10`, `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - * - *

The sizes of these IP ranges affect the maximum number of nodes for the workspace. - * - *

**Important**: Confirm the IP ranges used by your Databricks workspace before creating the - * workspace. You cannot change them after your workspace is deployed. If the IP address ranges - * for your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to - * fail. To determine the address range sizes that you need, Databricks provides a calculator as a - * Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - * - *

[calculate subnet sizes for a new workspace]: - * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html - */ + /** */ @JsonProperty("gcp_managed_network_config") private GcpManagedNetworkConfig gcpManagedNetworkConfig; - /** The configurations for the GKE cluster of a Databricks workspace. */ + /** */ @JsonProperty("gke_config") private GkeConfig gkeConfig; @@ -112,11 +92,7 @@ public class Workspace { @JsonProperty("network_id") private String networkId; - /** - * The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - * - *

[AWS Pricing]: https://databricks.com/product/aws-pricing - */ + /** */ @JsonProperty("pricing_tier") private PricingTier pricingTier; @@ -150,10 +126,7 @@ public class Workspace { @JsonProperty("workspace_name") private String workspaceName; - /** - * The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` - * initially. Continue to check the status until the status is `RUNNING`. - */ + /** */ @JsonProperty("workspace_status") private WorkspaceStatus workspaceStatus; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java index e700dac93..61557b7dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java @@ -41,7 +41,7 @@ public void delete(DeleteWorkspaceRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -82,7 +82,7 @@ public void update(UpdateWorkspaceRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java index 0880dbd86..eea8ba539 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java @@ -39,7 +39,7 @@ public void deleteQualityMonitor(DeleteQualityMonitorRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteQualityMonitorResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java index 5c1066f20..c39679117 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java @@ -14,12 +14,19 @@ public class AiGatewayRateLimit { private Long calls; /** - * Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with - * 'endpoint' being the default if not specified. + * Key field for a rate limit. Currently, 'user', 'user_group, 'service_principal', and 'endpoint' + * are supported, with 'endpoint' being the default if not specified. 
*/ @JsonProperty("key") private AiGatewayRateLimitKey key; + /** + * Principal field for a user, user group, or service principal to apply rate limiting to. Accepts + * a user email, group name, or service principal application ID. + */ + @JsonProperty("principal") + private String principal; + /** Renewal period field for a rate limit. Currently, only 'minute' is supported. */ @JsonProperty("renewal_period") private AiGatewayRateLimitRenewalPeriod renewalPeriod; @@ -42,6 +49,15 @@ public AiGatewayRateLimitKey getKey() { return key; } + public AiGatewayRateLimit setPrincipal(String principal) { + this.principal = principal; + return this; + } + + public String getPrincipal() { + return principal; + } + public AiGatewayRateLimit setRenewalPeriod(AiGatewayRateLimitRenewalPeriod renewalPeriod) { this.renewalPeriod = renewalPeriod; return this; @@ -58,12 +74,13 @@ public boolean equals(Object o) { AiGatewayRateLimit that = (AiGatewayRateLimit) o; return Objects.equals(calls, that.calls) && Objects.equals(key, that.key) + && Objects.equals(principal, that.principal) && Objects.equals(renewalPeriod, that.renewalPeriod); } @Override public int hashCode() { - return Objects.hash(calls, key, renewalPeriod); + return Objects.hash(calls, key, principal, renewalPeriod); } @Override @@ -71,6 +88,7 @@ public String toString() { return new ToStringer(AiGatewayRateLimit.class) .add("calls", calls) .add("key", key) + .add("principal", principal) .add("renewalPeriod", renewalPeriod) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java index a2870c1f5..3b731847b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java @@ -10,6 +10,12 @@ public enum AiGatewayRateLimitKey { @JsonProperty("endpoint") ENDPOINT, + @JsonProperty("service_principal") + SERVICE_PRINCIPAL, + @JsonProperty("user") USER, + + @JsonProperty("user_group") + USER_GROUP, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java deleted file mode 100755 index 94d8eb1f5..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
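The `AiGatewayRateLimit` changes above add a `principal` field and the `user_group`/`service_principal` key values, so a rate limit can now target a specific group or service principal rather than only `user` or `endpoint`. A minimal construction sketch; `setKey` and `setCalls` follow the standard generated setter pattern and are assumed here, and the group name is a placeholder:

import com.databricks.sdk.service.serving.AiGatewayRateLimit;
import com.databricks.sdk.service.serving.AiGatewayRateLimitKey;
import com.databricks.sdk.service.serving.AiGatewayRateLimitRenewalPeriod;

public class GroupRateLimit {
  public static void main(String[] args) {
    AiGatewayRateLimit limit =
        new AiGatewayRateLimit()
            .setKey(AiGatewayRateLimitKey.USER_GROUP)       // new key value from this diff
            .setPrincipal("data-science-team")              // placeholder group name
            .setCalls(100L)                                 // calls per renewal period (assumed setter)
            .setRenewalPeriod(AiGatewayRateLimitRenewalPeriod.MINUTE); // only 'minute' is supported
    System.out.println(limit);
  }
}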
- -package com.databricks.sdk.service.serving; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java index 129841ac9..b2764b5c1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java @@ -52,10 +52,7 @@ public class ServedEntityOutput { @JsonProperty("external_model") private ExternalModel externalModel; - /** - * All fields are not sensitive as they are hard-coded in the system and made available to - * customers. - */ + /** */ @JsonProperty("foundation_model") private FoundationModel foundationModel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java index 8ed57eb23..ec9eb293f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java @@ -21,10 +21,7 @@ public class ServedEntitySpec { @JsonProperty("external_model") private ExternalModel externalModel; - /** - * All fields are not sensitive as they are hard-coded in the system and made available to - * customers. 
- */ + /** */ @JsonProperty("foundation_model") private FoundationModel foundationModel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java index a1c66a4f3..7b120c29b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java @@ -13,7 +13,7 @@ public class ServingEndpointAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ServingEndpointPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java index 5f2f50a35..eb682a764 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java @@ -18,7 +18,7 @@ public class ServingEndpointPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ServingEndpointPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescription.java index 6d4100238..371c994ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermissionsDescription.java @@ -13,7 +13,7 @@ public class ServingEndpointPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private ServingEndpointPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java index a6506b9dc..65de06770 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java @@ -67,7 +67,7 @@ public void delete(DeleteServingEndpointRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java index eedc75e39..1346a7a43 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java @@ -40,7 +40,7 @@ public void delete(DeleteAccountIpAccessListRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -85,7 +85,7 @@ public void replace(ReplaceIpAccessList request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ReplaceResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -101,7 +101,7 @@ public void update(UpdateIpAccessList request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java index 751529e58..549f0f9a1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java @@ -17,13 +17,7 @@ public class ClusterAutoRestartMessage { @JsonProperty("enabled") private Boolean enabled; - /** - * Contains an information about the enablement status judging (e.g. whether the enterprise tier - * is enabled) This is only additional information that MUST NOT be used to decide whether the - * setting is enabled or not. This is intended to use only for purposes like showing an error - * message to the customer with the additional details. For example, using these details we can - * check why exactly the feature is disabled for this customer. 
- */ + /** */ @JsonProperty("enablement_details") private ClusterAutoRestartMessageEnablementDetails enablementDetails; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java index 266faa291..905f052f9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileSetting.java @@ -9,7 +9,7 @@ @Generated public class ComplianceSecurityProfileSetting { - /** SHIELD feature: CSP */ + /** */ @JsonProperty("compliance_security_profile_workspace") private ComplianceSecurityProfile complianceSecurityProfileWorkspace; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java index 0cfbedf56..1f2a62deb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java @@ -19,13 +19,7 @@ public class CreateIpAccessList { @JsonProperty("label") private String label; - /** - * Type of IP access list. Valid values are as follows and are case-sensitive: - * - *

* `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP - * or range. IP addresses in the block list are excluded even if they are included in an allow - * list. - */ + /** */ @JsonProperty("list_type") private ListType listType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java index dba141ff5..8d51f57fc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java @@ -10,7 +10,7 @@ /** An IP access list was successfully created. */ @Generated public class CreateIpAccessListResponse { - /** Definition of an IP Access list */ + /** */ @JsonProperty("ip_access_list") private IpAccessListInfo ipAccessList; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java index 85ca6762f..6a4aa431c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java @@ -9,7 +9,7 @@ @Generated public class CreateNetworkConnectivityConfigRequest { - /** Properties of the new network connectivity configuration. */ + /** */ @JsonProperty("network_connectivity_config") private CreateNetworkConnectivityConfiguration networkConnectivityConfig; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java index dc6876eb9..1c33e6191 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java @@ -9,7 +9,7 @@ @Generated public class CreateNetworkPolicyRequest { - /** */ + /** Network policy configuration details. */ @JsonProperty("network_policy") private AccountNetworkPolicy networkPolicy; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java index fc369b6c2..57ce53f8a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java @@ -13,10 +13,7 @@ public class CreatePrivateEndpointRuleRequest { /** Your Network Connectivity Configuration ID. */ @JsonIgnore private String networkConnectivityConfigId; - /** - * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure - * portal after initialization. 
- */ + /** */ @JsonProperty("private_endpoint_rule") private CreatePrivateEndpointRule privateEndpointRule; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java index 83ee147bf..7697448b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java @@ -9,7 +9,7 @@ @Generated public class CspEnablementAccountSetting { - /** Account level policy for CSP */ + /** */ @JsonProperty("csp_enablement_account") private CspEnablementAccount cspEnablementAccount; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseId.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseId.java new file mode 100755 index 000000000..d92f69459 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseId.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DefaultWarehouseId { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. 
+ */ + @JsonProperty("setting_name") + private String settingName; + + /** */ + @JsonProperty("string_val") + private StringMessage stringVal; + + public DefaultWarehouseId setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DefaultWarehouseId setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + public DefaultWarehouseId setStringVal(StringMessage stringVal) { + this.stringVal = stringVal; + return this; + } + + public StringMessage getStringVal() { + return stringVal; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DefaultWarehouseId that = (DefaultWarehouseId) o; + return Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName) + && Objects.equals(stringVal, that.stringVal); + } + + @Override + public int hashCode() { + return Objects.hash(etag, settingName, stringVal); + } + + @Override + public String toString() { + return new ToStringer(DefaultWarehouseId.class) + .add("etag", etag) + .add("settingName", settingName) + .add("stringVal", stringVal) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java new file mode 100755 index 000000000..41ea96633 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java @@ -0,0 +1,56 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Warehouse to be selected by default for users in this workspace. Covers SQL workloads only and + * can be overridden by users. + */ +@Generated +public class DefaultWarehouseIdAPI { + private static final Logger LOG = LoggerFactory.getLogger(DefaultWarehouseIdAPI.class); + + private final DefaultWarehouseIdService impl; + + /** Regular-use constructor */ + public DefaultWarehouseIdAPI(ApiClient apiClient) { + impl = new DefaultWarehouseIdImpl(apiClient); + } + + /** Constructor for mocks */ + public DefaultWarehouseIdAPI(DefaultWarehouseIdService mock) { + impl = mock; + } + + /** Reverts the Default Warehouse Id setting to its default value. */ + public DeleteDefaultWarehouseIdResponse delete(DeleteDefaultWarehouseIdRequest request) { + return impl.delete(request); + } + + /** Gets the Default Warehouse Id setting. */ + public DefaultWarehouseId get(GetDefaultWarehouseIdRequest request) { + return impl.get(request); + } + + public DefaultWarehouseId update( + boolean allowMissing, DefaultWarehouseId setting, String fieldMask) { + return update( + new UpdateDefaultWarehouseIdRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** Updates the Default Warehouse Id setting. 
*/ + public DefaultWarehouseId update(UpdateDefaultWarehouseIdRequest request) { + return impl.update(request); + } + + public DefaultWarehouseIdService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdImpl.java new file mode 100755 index 000000000..b88356619 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdImpl.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of DefaultWarehouseId */ +@Generated +class DefaultWarehouseIdImpl implements DefaultWarehouseIdService { + private final ApiClient apiClient; + + public DefaultWarehouseIdImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public DeleteDefaultWarehouseIdResponse delete(DeleteDefaultWarehouseIdRequest request) { + String path = "/api/2.0/settings/types/default_warehouse_id/names/default"; + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DeleteDefaultWarehouseIdResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DefaultWarehouseId get(GetDefaultWarehouseIdRequest request) { + String path = "/api/2.0/settings/types/default_warehouse_id/names/default"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DefaultWarehouseId.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DefaultWarehouseId update(UpdateDefaultWarehouseIdRequest request) { + String path = "/api/2.0/settings/types/default_warehouse_id/names/default"; + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DefaultWarehouseId.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdService.java new file mode 100755 index 000000000..fdc487b50 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdService.java @@ -0,0 +1,25 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Warehouse to be selected by default for users in this workspace. Covers SQL workloads only and + * can be overridden by users. + * + *

<p>This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface DefaultWarehouseIdService { + /** Reverts the Default Warehouse Id setting to its default value. */ + DeleteDefaultWarehouseIdResponse delete( + DeleteDefaultWarehouseIdRequest deleteDefaultWarehouseIdRequest); + + /** Gets the Default Warehouse Id setting. */ + DefaultWarehouseId get(GetDefaultWarehouseIdRequest getDefaultWarehouseIdRequest); + + /** Updates the Default Warehouse Id setting. */ + DefaultWarehouseId update(UpdateDefaultWarehouseIdRequest updateDefaultWarehouseIdRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdRequest.java new file mode 100755 index 000000000..9867c4f60 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdRequest.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteDefaultWarehouseIdRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteDefaultWarehouseIdRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDefaultWarehouseIdRequest that = (DeleteDefaultWarehouseIdRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDefaultWarehouseIdRequest.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdResponse.java new file mode 100755 index 000000000..8da0557b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdResponse.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. */ +@Generated +public class DeleteDefaultWarehouseIdResponse { + /** + * etag used for versioning. 
The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonProperty("etag") + private String etag; + + public DeleteDefaultWarehouseIdResponse setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDefaultWarehouseIdResponse that = (DeleteDefaultWarehouseIdResponse) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDefaultWarehouseIdResponse.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java deleted file mode 100755 index b8bc53d67..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java index 37029d2c5..82c1fd5fc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java @@ -24,13 +24,7 @@ public class EgressNetworkPolicyInternetAccessPolicy { @JsonProperty("log_only_mode") private EgressNetworkPolicyInternetAccessPolicyLogOnlyMode logOnlyMode; - /** - * At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: - * Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can - * only access explicitly allowed internet and storage destinations, as well as UC connections and - * external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via - * private link. 
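Editor's note (illustrative sketch, not part of the generated diff): the new files above add a DefaultWarehouseId setting API with the usual get/update/delete surface and etag-based optimistic concurrency. A minimal usage sketch follows, assuming the standard WorkspaceClient#settings() entry point shown later in this patch, that StringMessage carries the warehouse ID in its value field, and that "string_val.value" is the field-mask path (neither of the last two is confirmed by this diff):

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.settings.DefaultWarehouseId;
    import com.databricks.sdk.service.settings.GetDefaultWarehouseIdRequest;
    import com.databricks.sdk.service.settings.StringMessage;

    public class DefaultWarehouseIdSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();

        // Read the current setting; the response carries an etag for optimistic concurrency.
        DefaultWarehouseId current =
            w.settings().DefaultWarehouseId().get(new GetDefaultWarehouseIdRequest());

        // Follow the read -> update pattern from the Javadoc: pass the etag obtained from
        // the GET so concurrent writers do not silently overwrite each other.
        DefaultWarehouseId updated =
            w.settings()
                .DefaultWarehouseId()
                .update(
                    true, // allow_missing, always true per the Settings API convention
                    new DefaultWarehouseId()
                        .setEtag(current.getEtag())
                        .setStringVal(new StringMessage().setValue("<warehouse-id>")), // setValue is assumed, not shown in this diff
                    "string_val.value"); // hypothetical field-mask path
        System.out.println(updated.getEtag());
      }
    }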
- */ + /** */ @JsonProperty("restriction_mode") private EgressNetworkPolicyInternetAccessPolicyRestrictionMode restrictionMode; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java index 6c97950e2..5fa93fd1c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java @@ -18,12 +18,7 @@ public class EgressNetworkPolicyInternetAccessPolicyInternetDestination { @JsonProperty("destination") private String destination; - /** - * The filtering protocol used by the DP. For private and public preview, SEG will only support - * TCP filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will - * be set to TCP by default and hidden from the user. In the future, users may be able to select - * HTTP filtering (i.e. SNI based filtering, filtering by FQDN). - */ + /** */ @JsonProperty("protocol") private EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java deleted file mode 100755 index 43f12c991..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class Empty { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(Empty.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java index c0b0a4544..5a7f6e6ae 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java @@ -9,7 +9,7 @@ @Generated public class EnhancedSecurityMonitoringSetting { - /** SHIELD feature: ESM */ + /** */ @JsonProperty("enhanced_security_monitoring_workspace") private EnhancedSecurityMonitoring enhancedSecurityMonitoringWorkspace; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java index 9dddd8ab0..999819258 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java @@ -9,7 +9,7 @@ @Generated public class EsmEnablementAccountSetting { - /** Account level policy for ESM */ + /** */ @JsonProperty("esm_enablement_account") private EsmEnablementAccount esmEnablementAccount; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java index 45e0c9c3a..4e74c0b8d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java @@ -10,7 +10,7 @@ /** An IP access list was successfully returned. */ @Generated public class FetchIpAccessListResponse { - /** Definition of an IP Access list */ + /** */ @JsonProperty("ip_access_list") private IpAccessListInfo ipAccessList; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultWarehouseIdRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultWarehouseIdRequest.java new file mode 100755 index 000000000..9e2b62400 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultWarehouseIdRequest.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetDefaultWarehouseIdRequest { + /** + * etag used for versioning. 
The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetDefaultWarehouseIdRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDefaultWarehouseIdRequest that = (GetDefaultWarehouseIdRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetDefaultWarehouseIdRequest.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java index 88afa428f..47925b4bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java @@ -9,7 +9,7 @@ @Generated public class GetIpAccessListResponse { - /** Definition of an IP Access list */ + /** */ @JsonProperty("ip_access_list") private IpAccessListInfo ipAccessList; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java index c2ac51992..439e4055e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java @@ -39,13 +39,7 @@ public class IpAccessListInfo { @JsonProperty("list_id") private String listId; - /** - * Type of IP access list. Valid values are as follows and are case-sensitive: - * - *

* `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP - * or range. IP addresses in the block list are excluded even if they are included in an allow - * list. - */ + /** */ @JsonProperty("list_type") private ListType listType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java index b9cca1598..f70522d2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteIpAccessListRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -74,7 +74,7 @@ public void replace(ReplaceIpAccessList request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ReplaceResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -87,7 +87,7 @@ public void update(UpdateIpAccessList request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java index e46162f5d..d4f40b77f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java @@ -10,17 +10,11 @@ /** Default rules don't have specific targets. */ @Generated public class NccEgressDefaultRules { - /** - * The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to - * allow traffic from your Databricks workspace. - */ + /** */ @JsonProperty("aws_stable_ip_rule") private NccAwsStableIpRule awsStableIpRule; - /** - * The stable Azure service endpoints. You can configure the firewall of your Azure resources to - * allow traffic from your Databricks serverless compute resources. 
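Editor's note (illustrative sketch, not part of the generated diff): the Impl changes in this patch replace placeholder deserialization targets such as DeleteResponse.class, ReplaceResponse.class and UpdateResponse.class with Void.class for endpoints that return an empty body. The public surface is unchanged because the affected service methods already return void, so existing callers keep working, for example (assuming the existing ipAccessLists() accessor on WorkspaceClient):

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.settings.DeleteIpAccessListRequest;

    public class IpAccessListDeleteSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        // Still returns void; only the internal empty-response type changed to Void.class.
        w.ipAccessLists()
            .delete(new DeleteIpAccessListRequest().setIpAccessListId("<list-id>"));
      }
    }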
- */ + /** */ @JsonProperty("azure_service_endpoint_rule") private NccAzureServiceEndpointRule azureServiceEndpointRule; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java index 4bd996e8a..7884cf74f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java @@ -64,7 +64,7 @@ public void deleteNetworkConnectivityConfiguration( Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteNetworkConnectivityConfigurationResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java index 1f9f29054..ee4e9a8ba 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java @@ -41,7 +41,7 @@ public void deleteNetworkPolicyRpc(DeleteNetworkPolicyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteNetworkPolicyRpcResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java index 498afdf6b..ab692d437 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteNotificationDestinationRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Empty.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java index 5dfe405f2..b0167e23d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java @@ -9,13 +9,7 @@ @Generated public class PersonalComputeMessage { - /** - * ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing - * all users to create single-machine compute resources. 
DELEGATE: Moves access control for the - * Personal Compute default policy to individual workspaces and requires a workspace’s users or - * groups to be added to the ACLs of that workspace’s Personal Compute default policy before they - * will be able to create compute resources through that policy. - */ + /** */ @JsonProperty("value") private PersonalComputeMessageEnum value; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java index b7c9d65e1..82be25f3a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java @@ -27,13 +27,7 @@ public class ReplaceIpAccessList { @JsonProperty("label") private String label; - /** - * Type of IP access list. Valid values are as follows and are case-sensitive: - * - *

* `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP - * or range. IP addresses in the block list are excluded even if they are included in an allow - * list. - */ + /** */ @JsonProperty("list_type") private ListType listType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java deleted file mode 100755 index 5a3ba7e51..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class ReplaceResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(ReplaceResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java deleted file mode 100755 index 63ac738c7..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class RevokeTokenResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(RevokeTokenResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java deleted file mode 100755 index 6d4a07fab..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class SetStatusResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(SetStatusResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java index 16fa226ef..6e7933a7c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java @@ -25,6 +25,8 @@ public class SettingsAPI { private DefaultNamespaceAPI defaultNamespaceAPI; + private DefaultWarehouseIdAPI defaultWarehouseIdAPI; + private DisableLegacyAccessAPI disableLegacyAccessAPI; private DisableLegacyDbfsAPI disableLegacyDbfsAPI; @@ -60,6 +62,8 @@ public SettingsAPI(ApiClient apiClient) { defaultNamespaceAPI = new DefaultNamespaceAPI(apiClient); + defaultWarehouseIdAPI = new DefaultWarehouseIdAPI(apiClient); + disableLegacyAccessAPI = new DisableLegacyAccessAPI(apiClient); disableLegacyDbfsAPI = new DisableLegacyDbfsAPI(apiClient); @@ -123,6 +127,11 @@ public DefaultNamespaceAPI DefaultNamespace() { return defaultNamespaceAPI; } + /** Warehouse to be selected by default for users in this workspace. */ + public DefaultWarehouseIdAPI DefaultWarehouseId() { + return defaultWarehouseIdAPI; + } + /** 'Disabling legacy access' has the following impacts: 1. 
*/ public DisableLegacyAccessAPI DisableLegacyAccess() { return disableLegacyAccessAPI; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java index 692f87aee..48af63a1d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java @@ -13,7 +13,7 @@ public class TokenAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private TokenPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java index 8264976e5..0303b550d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteTokenManagementRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java index 013077476..470a7e2e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java @@ -18,7 +18,7 @@ public class TokenPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private TokenPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescription.java index 1ac62cbf5..c65b7c12a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermissionsDescription.java @@ -13,7 +13,7 @@ public class TokenPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private TokenPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java index ed323fd5b..c1f979ecd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java @@ -38,7 +38,7 @@ public void delete(RevokeTokenRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - 
apiClient.execute(req, RevokeTokenResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java index 3e26425dc..b4402d001 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java @@ -28,15 +28,7 @@ public class UpdateDefaultNamespaceSettingRequest { @JsonProperty("field_mask") private String fieldMask; - /** - * This represents the setting configuration for the default namespace in the Databricks - * workspace. Setting the default catalog for the workspace determines the catalog that is used - * when queries do not reference a fully qualified 3 level name. For example, if the default - * catalog is set to 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object - * 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a - * restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only - * applies when using Unity Catalog-enabled compute. - */ + /** */ @JsonProperty("setting") private DefaultNamespaceSetting setting; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWarehouseIdRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWarehouseIdRequest.java new file mode 100755 index 000000000..623254b06 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWarehouseIdRequest.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateDefaultWarehouseIdRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * The field mask must be a single string, with multiple fields separated by commas (no spaces). + * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not + * allowed, as only the entire collection field can be specified. Field names must exactly match + * the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private DefaultWarehouseId setting; + + public UpdateDefaultWarehouseIdRequest setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateDefaultWarehouseIdRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateDefaultWarehouseIdRequest setSetting(DefaultWarehouseId setting) { + this.setting = setting; + return this; + } + + public DefaultWarehouseId getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDefaultWarehouseIdRequest that = (UpdateDefaultWarehouseIdRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateDefaultWarehouseIdRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java index 20c17976f..f636af0ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java @@ -27,13 +27,7 @@ public class UpdateIpAccessList { @JsonProperty("label") private String label; - /** - * Type of IP access list. Valid values are as follows and are case-sensitive: - * - *

* `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP - * or range. IP addresses in the block list are excluded even if they are included in an allow - * list. - */ + /** */ @JsonProperty("list_type") private ListType listType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java index a8b5df2c4..defd2840b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java @@ -17,10 +17,7 @@ public class UpdateNccPrivateEndpointRuleRequest { */ @JsonIgnore private String networkConnectivityConfigId; - /** - * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure - * portal after initialization. - */ + /** */ @JsonProperty("private_endpoint_rule") private UpdatePrivateEndpointRule privateEndpointRule; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java index 54e9f3e6f..708f50a89 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java @@ -10,7 +10,7 @@ @Generated public class UpdateNetworkPolicyRequest { - /** */ + /** Updated network policy configuration details. */ @JsonProperty("network_policy") private AccountNetworkPolicy networkPolicy; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java deleted file mode 100755 index 9ea1a5162..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java index 708394972..51d55de20 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java @@ -13,7 +13,7 @@ public class UpdateWorkspaceNetworkOptionRequest { /** The workspace ID. */ @JsonIgnore private Long workspaceId; - /** */ + /** The network option details for the workspace. 
*/ @JsonProperty("workspace_network_option") private WorkspaceNetworkOption workspaceNetworkOption; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java index b0f3313e7..542281a3c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java @@ -32,7 +32,7 @@ public void setStatus(Map request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, SetStatusResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java index 59c3475f5..955882109 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java @@ -10,7 +10,7 @@ @Generated public class CreateFederationPolicyRequest { - /** */ + /** Name of the policy. This is the name of the policy to be created. */ @JsonProperty("policy") private FederationPolicy policy; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java index f534903c8..6d58d807d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java @@ -9,7 +9,7 @@ @Generated public class CreateProvider { - /** The delta sharing authentication type. */ + /** */ @JsonProperty("authentication_type") private AuthenticationType authenticationType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java index b7589d41f..8819ada3e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java @@ -9,7 +9,7 @@ @Generated public class CreateRecipient { - /** The delta sharing authentication type. */ + /** */ @JsonProperty("authentication_type") private AuthenticationType authenticationType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java deleted file mode 100755 index 56f100810..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java index 93166cac2..34391efca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java @@ -10,11 +10,11 @@ /** Represents a UC dependency. */ @Generated public class DeltaSharingDependency { - /** A Function in UC as a dependency. */ + /** */ @JsonProperty("function") private DeltaSharingFunctionDependency function; - /** A Table in UC as a dependency. */ + /** */ @JsonProperty("table") private DeltaSharingTableDependency table; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java deleted file mode 100755 index 3b88fa15e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class GetActivationUrlInfoResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(GetActivationUrlInfoResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java index 4b5999d5f..d82783885 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java @@ -9,7 +9,7 @@ @Generated public class ProviderInfo { - /** The delta sharing authentication type. 
*/ + /** */ @JsonProperty("authentication_type") private AuthenticationType authenticationType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java index 273efebdb..64df6b02f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteProviderRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java index 59ca4edbf..4b89ce48c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java @@ -26,7 +26,7 @@ public void getActivationUrlInfo(GetActivationUrlInfoRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, GetActivationUrlInfoResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java index 03b5136cf..f7854d4c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java @@ -42,7 +42,7 @@ public void delete(DeleteFederationPolicyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java index 12cba1572..9eea6e6d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java @@ -24,7 +24,7 @@ public class RecipientInfo { @JsonProperty("activation_url") private String activationUrl; - /** The delta sharing authentication type. 
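Editor's note (illustrative sketch, not part of the generated diff): the sharing classes above keep their AuthenticationType fields; only the redundant field-level Javadoc is dropped. A minimal sketch of creating a recipient with token-based Delta Sharing authentication, assuming the existing recipients() accessor on WorkspaceClient and the TOKEN enum value:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.sharing.AuthenticationType;
    import com.databricks.sdk.service.sharing.CreateRecipient;
    import com.databricks.sdk.service.sharing.RecipientInfo;

    public class CreateRecipientSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        RecipientInfo recipient =
            w.recipients()
                .create(
                    new CreateRecipient()
                        .setName("analytics-partner") // hypothetical recipient name
                        .setAuthenticationType(AuthenticationType.TOKEN));
        // activation_url is populated for token-based recipients, per RecipientInfo above.
        System.out.println(recipient.getActivationUrl());
      }
    }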
*/ + /** */ @JsonProperty("authentication_type") private AuthenticationType authenticationType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java index afd12e0eb..bcb4c877d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteRecipientRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java index df926ca13..716fe7628 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java @@ -36,7 +36,7 @@ public void delete(DeleteShareRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java index e30b4cca6..5edaf9c6c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java @@ -6,6 +6,7 @@ @Generated public enum TableInternalAttributesSharedTableType { + DELTA_ICEBERG_TABLE, DIRECTORY_BASED_TABLE, FILE_BASED_TABLE, FOREIGN_TABLE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java index cbe402a72..c5be505f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java @@ -52,7 +52,12 @@ public class AlertV2 { @JsonProperty("query_text") private String queryText; - /** The run as username. This field is set to "Unavailable" if the user has been deleted. */ + /** + * The run as username or application ID of service principal. This field is set to "Unavailable" + * if the user has been deleted. On Create and Update, this field can be set to application ID of + * an active service principal. Setting this field requires the servicePrincipal/user role. If not + * specified it'll default to be request user. 
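Editor's note (illustrative sketch, not part of the generated diff): the expanded run_as_user_name documentation above means an AlertV2 can be created to run as a service principal by setting the field to an application ID, provided the caller holds the servicePrincipal/user role. A hedged sketch, assuming the AlertsV2 createAlert entry point wraps the alert in a CreateAlertV2Request (not shown in this diff):

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.sql.AlertV2;
    import com.databricks.sdk.service.sql.CreateAlertV2Request;

    public class AlertRunAsSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        AlertV2 alert =
            new AlertV2()
                .setQueryText("SELECT 1") // minimal query for illustration
                .setRunAsUserName("<service-principal-application-id>"); // placeholder app ID
        // createAlert/CreateAlertV2Request are assumed from the existing AlertsV2 surface.
        AlertV2 created = w.alertsV2().createAlert(new CreateAlertV2Request().setAlert(alert));
        System.out.println(created.getRunAsUserName());
      }
    }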
+ */ @JsonProperty("run_as_user_name") private String runAsUserName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java index 33a394bf3..b88ca460c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java @@ -37,7 +37,7 @@ public void delete(TrashAlertRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Empty.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java index 286783571..a199dcfed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java @@ -38,7 +38,7 @@ public void delete(DeleteAlertsLegacyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -73,7 +73,7 @@ public void update(EditAlert request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java index b8379503e..7e8b01de6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java @@ -63,7 +63,7 @@ public void trashAlert(TrashAlertV2Request request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Empty.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java deleted file mode 100755 index 38b973542..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
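The expanded `run_as_user_name` Javadoc on AlertV2 above is the main behavioral note in this hunk: on Create and Update the field may carry the application ID of an active service principal, which requires the servicePrincipal/user role, and it falls back to the request user when omitted. Below is a minimal, hedged sketch of populating such an alert; the display name, query text, and application ID are placeholder values, and wiring the object into the AlertsV2 create call is assumed rather than shown in this diff.

```java
import com.databricks.sdk.service.sql.AlertV2;

public class AlertRunAsSketch {
  public static void main(String[] args) {
    AlertV2 alert = new AlertV2()
        .setDisplayName("nightly-error-count") // placeholder
        .setQueryText("SELECT count(*) AS errors FROM main.logs.events WHERE level = 'ERROR'")
        // Application ID of an active service principal (placeholder UUID);
        // requires the servicePrincipal/user role, otherwise the request user is used.
        .setRunAsUserName("12345678-1234-1234-1234-123456789012");
    // Pass `alert` to the AlertsV2 create/update call on a WorkspaceClient
    // (accessor and request wrapper names are not part of this diff).
    System.out.println(alert);
  }
}
```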
- -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class CancelExecutionResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(CancelExecutionResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java index af89d90bb..71850c011 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java @@ -87,7 +87,7 @@ public class CreateWarehouseRequest { @JsonProperty("name") private String name; - /** Configurations whether the warehouse should use spot instances. */ + /** */ @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; @@ -100,10 +100,7 @@ public class CreateWarehouseRequest { @JsonProperty("tags") private EndpointTags tags; - /** - * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to - * `PRO` and also set the field `enable_serverless_compute` to `true`. - */ + /** */ @JsonProperty("warehouse_type") private CreateWarehouseRequestWarehouseType warehouseType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java index 2a4e3438d..514627737 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteDashboardWidgetRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java index fb45272bc..ec32f97a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteDashboardRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -77,7 +77,7 @@ public void restore(RestoreDashboardRequest request) { Request req = new Request("POST", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, RestoreResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new 
DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java deleted file mode 100755 index 8395b3a12..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java deleted file mode 100755 index 17a6c84e4..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteWarehouseResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteWarehouseResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java index 726c96cd6..d07e8e25b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java @@ -90,7 +90,7 @@ public class EditWarehouseRequest { @JsonProperty("name") private String name; - /** Configurations whether the warehouse should use spot instances. */ + /** */ @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; @@ -103,10 +103,7 @@ public class EditWarehouseRequest { @JsonProperty("tags") private EndpointTags tags; - /** - * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to - * `PRO` and also set the field `enable_serverless_compute` to `true`. 
- */ + /** */ @JsonProperty("warehouse_type") private EditWarehouseRequestWarehouseType warehouseType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java deleted file mode 100755 index 0a222ec8a..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class EditWarehouseResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(EditWarehouseResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java deleted file mode 100755 index 1edb28c43..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java +++ /dev/null @@ -1,32 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -/** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the firm - * right now. - */ -@Generated -public class Empty { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(Empty.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java index ad4444a8a..243675c94 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java @@ -24,7 +24,7 @@ public class EndpointHealth { @JsonProperty("message") private String message; - /** Health status of the warehouse. */ + /** */ @JsonProperty("status") private Status status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java index bd56e2472..92868d39b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java @@ -110,11 +110,11 @@ public class EndpointInfo { @JsonProperty("odbc_params") private OdbcParams odbcParams; - /** Configurations whether the warehouse should use spot instances. 
*/ + /** */ @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; - /** State of the warehouse */ + /** */ @JsonProperty("state") private State state; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java index e0a414c5b..e7cb13ba1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java @@ -110,11 +110,11 @@ public class GetWarehouseResponse { @JsonProperty("odbc_params") private OdbcParams odbcParams; - /** Configurations whether the warehouse should use spot instances. */ + /** */ @JsonProperty("spot_instance_policy") private SpotInstancePolicy spotInstancePolicy; - /** State of the warehouse */ + /** */ @JsonProperty("state") private State state; @@ -127,10 +127,7 @@ public class GetWarehouseResponse { @JsonProperty("tags") private EndpointTags tags; - /** - * Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to - * `PRO` and also set the field `enable_serverless_compute` to `true`. - */ + /** */ @JsonProperty("warehouse_type") private GetWarehouseResponseWarehouseType warehouseType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java index 27f88fa53..c3bd03f24 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java @@ -37,7 +37,7 @@ public void delete(TrashQueryRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Empty.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java index cce4fb4eb..baaafee64 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteQueriesLegacyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -76,7 +76,7 @@ public void restore(RestoreQueriesLegacyRequest request) { Request req = new Request("POST", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, RestoreResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java index 0dacc2b8d..0a5a1dd26 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteVisualizationRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, Empty.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java index e62786295..179eab191 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteQueryVisualizationsLegacyRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java deleted file mode 100755 index 09f49de67..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class RestoreResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(RestoreResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java index d4189efa2..14c187208 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java @@ -19,7 +19,7 @@ public class ResultManifest { @JsonProperty("format") private Format format; - /** The schema is an ordered list of column descriptions. */ + /** */ @JsonProperty("schema") private ResultSchema schema; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java deleted file mode 100755 index 82b47fdba..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
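For the warehouse_type Javadoc dropped from CreateWarehouseRequest, EditWarehouseRequest, and GetWarehouseResponse above, the rule it stated still applies: serverless compute needs warehouse_type set to `PRO` together with `enable_serverless_compute` set to true. A minimal sketch of a request built that way, assuming the usual generated WarehousesAPI accessor on WorkspaceClient; the name and cluster size are placeholders, and waiter/error handling is omitted.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.CreateWarehouseRequest;
import com.databricks.sdk.service.sql.CreateWarehouseRequestWarehouseType;

public class ServerlessWarehouseSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // credentials resolved from the environment

    CreateWarehouseRequest request = new CreateWarehouseRequest()
        .setName("sdk-example-warehouse") // placeholder
        .setClusterSize("2X-Small")       // placeholder
        // Serverless requires the PRO type plus the serverless flag.
        .setWarehouseType(CreateWarehouseRequestWarehouseType.PRO)
        .setEnableServerlessCompute(true);

    // create(...) returns a waiter in the generated API (not handled here).
    w.warehouses().create(request);
  }
}
```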
- -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class SetWorkspaceWarehouseConfigResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(SetWorkspaceWarehouseConfigResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java deleted file mode 100755 index 32fc7af3c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class StartWarehouseResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(StartWarehouseResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java index e6b49df84..4c321af78 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java @@ -22,7 +22,7 @@ public void cancelExecution(CancelExecutionRequest request) { try { Request req = new Request("POST", path); ApiClient.setQuery(req, request); - apiClient.execute(req, CancelExecutionResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponse.java index d9fb5469e..ec0b18178 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementResponse.java @@ -9,7 +9,7 @@ @Generated public class StatementResponse { - /** The result manifest provides schema and metadata for the result set. */ + /** */ @JsonProperty("manifest") private ResultManifest manifest; @@ -24,7 +24,7 @@ public class StatementResponse { @JsonProperty("statement_id") private String statementId; - /** The status response includes execution state and if relevant, error information. 
*/ + /** */ @JsonProperty("status") private StatementStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java index e67cb876d..ddbfd8aa0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java @@ -14,13 +14,7 @@ public class StatementStatus { @JsonProperty("error") private ServiceError error; - /** - * Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - - * `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution - * failed; reason for failure described in accomanying error message - `CANCELED`: user canceled; - * can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: - * execution successful, and statement closed; result no longer available for fetch - */ + /** */ @JsonProperty("state") private StatementState state; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java deleted file mode 100755 index e98e257e7..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class StopWarehouseResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(StopWarehouseResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java deleted file mode 100755 index 2db9c2631..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
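The StatementStatus Javadoc removed just above still enumerates the execution states: PENDING, RUNNING, SUCCEEDED, FAILED, CANCELED, and CLOSED. As a small illustration of how a caller might act on them, here is a hedged helper that treats everything after RUNNING as terminal; the class and method names are mine, while the enum constants and the generated getStatus()/getState() accessors come from the types touched in this diff.

```java
import com.databricks.sdk.service.sql.StatementResponse;
import com.databricks.sdk.service.sql.StatementState;

public final class StatementStates {
  private StatementStates() {}

  /** Returns true once the statement can no longer change state. */
  public static boolean isTerminal(StatementResponse response) {
    if (response == null || response.getStatus() == null || response.getStatus().getState() == null) {
      return false; // nothing to inspect yet
    }
    switch (response.getStatus().getState()) {
      case SUCCEEDED: // result data available for fetch
      case FAILED:    // failure details in the accompanying ServiceError
      case CANCELED:  // explicit cancel, or on_wait_timeout=CANCEL
      case CLOSED:    // successful, but results no longer fetchable
        return true;
      case PENDING:   // waiting for a warehouse
      case RUNNING:
      default:
        return false;
    }
  }
}
```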
- -package com.databricks.sdk.service.sql; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java index 1dc83f0f6..796feb1b1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java @@ -13,7 +13,7 @@ public class WarehouseAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private WarehousePermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermission.java index e8fd1f68e..8f9b84ddc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermission.java @@ -18,7 +18,7 @@ public class WarehousePermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private WarehousePermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescription.java index 91241417b..dbf21b9db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousePermissionsDescription.java @@ -13,7 +13,7 @@ public class WarehousePermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private WarehousePermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java index d1fb0fda2..e1cc23638 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteWarehouseRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteWarehouseResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -51,7 +51,7 @@ public void edit(EditWarehouseRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", 
"application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, EditWarehouseResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -146,7 +146,7 @@ public void setWorkspaceWarehouseConfig(SetWorkspaceWarehouseConfigRequest reque ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, SetWorkspaceWarehouseConfigResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -159,7 +159,7 @@ public void start(StartRequest request) { Request req = new Request("POST", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, StartWarehouseResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -172,7 +172,7 @@ public void stop(StopRequest request) { Request req = new Request("POST", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, StopWarehouseResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java index 0856cbced..8c688ec23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java @@ -21,13 +21,7 @@ public class CreateVectorIndexRequest { @JsonProperty("endpoint_name") private String endpointName; - /** - * There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs - * with a source Delta Table, automatically and incrementally updating the index as the underlying - * data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and - * write of vectors and metadata through our REST and SDK APIs. With this model, the user manages - * index updates. - */ + /** */ @JsonProperty("index_type") private VectorIndexType indexType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java deleted file mode 100755 index 1dd487c16..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.vectorsearch; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteEndpointResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteEndpointResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java deleted file mode 100755 index 0acf66aac..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.vectorsearch; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteIndexResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteIndexResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java index a44466f2f..fbf2e8965 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java @@ -17,13 +17,7 @@ public class MiniVectorIndex { @JsonProperty("endpoint_name") private String endpointName; - /** - * There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs - * with a source Delta Table, automatically and incrementally updating the index as the underlying - * data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and - * write of vectors and metadata through our REST and SDK APIs. With this model, the user manages - * index updates. - */ + /** */ @JsonProperty("index_type") private VectorIndexType indexType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java deleted file mode 100755 index f2c937b76..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
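The index_type Javadoc removed from CreateVectorIndexRequest and MiniVectorIndex above still captures the two index flavors: DELTA_SYNC follows a source Delta table, while DIRECT_ACCESS is written to directly through the REST/SDK APIs. A minimal sketch of declaring the DELTA_SYNC case; the index, endpoint, and primary-key names are placeholders, the primary-key setter is assumed from the field name, and the delta-sync spec (source table, embedding configuration) is deliberately left out.

```java
import com.databricks.sdk.service.vectorsearch.CreateVectorIndexRequest;
import com.databricks.sdk.service.vectorsearch.VectorIndexType;

public class VectorIndexTypeSketch {
  public static void main(String[] args) {
    CreateVectorIndexRequest request = new CreateVectorIndexRequest()
        .setName("main.default.docs_index")           // placeholder index name
        .setEndpointName("my-vector-search-endpoint") // placeholder endpoint
        .setPrimaryKey("id")
        // DELTA_SYNC keeps the index in step with a source Delta table;
        // DIRECT_ACCESS would instead accept direct vector/metadata writes.
        .setIndexType(VectorIndexType.DELTA_SYNC);
    // A DELTA_SYNC index additionally needs its delta-sync spec before it can be created.
    System.out.println(request);
  }
}
```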
- -package com.databricks.sdk.service.vectorsearch; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class SyncIndexResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(SyncIndexResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java index f5922390d..a823544d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java @@ -25,13 +25,7 @@ public class VectorIndex { @JsonProperty("endpoint_name") private String endpointName; - /** - * There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs - * with a source Delta Table, automatically and incrementally updating the index as the underlying - * data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and - * write of vectors and metadata through our REST and SDK APIs. With this model, the user manages - * index updates. - */ + /** */ @JsonProperty("index_type") private VectorIndexType indexType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java index a79ee43ba..063e46d73 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java @@ -37,7 +37,7 @@ public void deleteEndpoint(DeleteEndpointRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteEndpointResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java index 7933132f3..746a4999e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java @@ -51,7 +51,7 @@ public void deleteIndex(DeleteIndexRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteIndexResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -133,7 +133,7 @@ public void syncIndex(SyncIndexRequest request) { Request req = new Request("POST", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, SyncIndexResponse.class); + 
apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java index 932d9ace1..268fa95ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java @@ -7,6 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * An item representing an ACL rule applied to the given principal (user or group) on the associated + * scope point. + */ @Generated public class AclItem { /** The permission level applied to the principal. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclPermission.java index df20b8b6e..b38e35cf1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclPermission.java @@ -4,6 +4,7 @@ import com.databricks.sdk.support.Generated; +/** The ACL permission levels for Secret ACLs applied to secret scopes. */ @Generated public enum AclPermission { MANAGE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java index a2acbeb21..d2ec8ea6b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** The metadata of the Azure KeyVault for a secret scope of type `AZURE_KEYVAULT` */ @Generated public class AzureKeyVaultSecretScopeMetadata { /** The DNS of the KeyVault */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java index afee4ed59..b66751139 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java @@ -9,11 +9,11 @@ @Generated public class CreateScope { - /** The metadata for the secret scope if the type is `AZURE_KEYVAULT` */ + /** The metadata for the secret scope if the type is ``AZURE_KEYVAULT`` */ @JsonProperty("backend_azure_keyvault") private AzureKeyVaultSecretScopeMetadata backendAzureKeyvault; - /** The principal that is initially granted `MANAGE` permission to the created scope. */ + /** The principal that is initially granted ``MANAGE`` permission to the created scope. */ @JsonProperty("initial_manage_principal") private String initialManagePrincipal; @@ -22,7 +22,8 @@ public class CreateScope { private String scope; /** - * The backend type the scope will be created with. If not specified, will default to `DATABRICKS` + * The backend type the scope will be created with. 
If not specified, will default to + * ``DATABRICKS`` */ @JsonProperty("scope_backend_type") private ScopeBackendType scopeBackendType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java deleted file mode 100755 index bbfc3ed9c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class CreateScopeResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(CreateScopeResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java deleted file mode 100755 index e02226127..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteAclResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteAclResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java deleted file mode 100755 index 3b1fb2ec7..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
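Tying together the new secrets Javadoc above (AclItem, AclPermission, AzureKeyVaultSecretScopeMetadata, CreateScope): a minimal sketch that creates a Databricks-backed scope, stores a secret, and grants a group MANAGE on the scope. The scope, key, and principal names are placeholders; createScope(String) appears later in this diff, while the putSecret/putAcl request forms are assumed from the SDK's usual generated surface.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.workspace.AclPermission;
import com.databricks.sdk.service.workspace.PutAcl;
import com.databricks.sdk.service.workspace.PutSecret;

public class SecretScopeSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // scope_backend_type defaults to DATABRICKS when not specified.
    w.secrets().createScope("team-etl"); // placeholder scope name

    w.secrets().putSecret(new PutSecret()
        .setScope("team-etl")
        .setKey("warehouse-token")
        .setStringValue("example-value-not-a-real-token"));

    // MANAGE is one of the AclPermission levels documented above.
    w.secrets().putAcl(new PutAcl()
        .setScope("team-etl")
        .setPrincipal("data-engineers") // placeholder group
        .setPermission(AclPermission.MANAGE));
  }
}
```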
- -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteCredentialsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteCredentialsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java deleted file mode 100755 index 2fafce6f5..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteRepoResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteRepoResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java deleted file mode 100755 index f0b7586b9..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java deleted file mode 100755 index 4aa4c998b..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
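A general note on the recurring pattern in these Impl hunks: the empty wrapper types (DeleteResponse, Empty, and the various Delete*Response classes deleted here) are replaced by deserializing the HTTP response as Void.class, so the public API methods keep returning void and existing callers compile and behave unchanged. A small sketch under that assumption, using a placeholder repo ID:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.workspace.DeleteRepoRequest;

public class VoidDeleteSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Still returns void; only the internal deserialization target changed.
    w.repos().delete(new DeleteRepoRequest().setRepoId(123456789L)); // placeholder repo ID
  }
}
```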
- -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteScopeResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteScopeResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java deleted file mode 100755 index 1ebe0ea0f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class DeleteSecretResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(DeleteSecretResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java index 7a6282ef0..2bac2af61 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java @@ -10,12 +10,12 @@ @Generated public class GetSecretRequest { - /** The key to fetch secret for. */ + /** Name of the secret to fetch value information. */ @JsonIgnore @QueryParam("key") private String key; - /** The name of the scope to fetch secret information from. */ + /** The name of the scope that contains the secret. 
*/ @JsonIgnore @QueryParam("scope") private String scope; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java index 64260be1e..f0f261d2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteCredentialsRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteCredentialsResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -76,7 +76,7 @@ public void update(UpdateCredentialsRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateCredentialsResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java deleted file mode 100755 index aac0f4f57..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class ImportResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(ImportResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java deleted file mode 100755 index 4bfe6ad7b..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
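Following the reworded GetSecretRequest fields above (the scope that contains the secret, and the name of the secret to fetch), a hedged sketch of reading a value back. The getSecret call mirrors the request type shown in this diff, but the GetSecretResponse accessor and the base64 encoding of the value are assumptions about the generated API rather than something this patch shows.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.workspace.GetSecretRequest;
import com.databricks.sdk.service.workspace.GetSecretResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class ReadSecretSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    GetSecretResponse secret = w.secrets().getSecret(new GetSecretRequest()
        .setScope("team-etl")        // scope that contains the secret
        .setKey("warehouse-token")); // name of the secret to fetch

    // Assumption: the value comes back base64-encoded.
    byte[] decoded = Base64.getDecoder().decode(secret.getValue());
    String value = new String(decoded, StandardCharsets.UTF_8);
    System.out.println("secret length: " + value.length()); // avoid printing the value itself
  }
}
```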
- -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class MkdirsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(MkdirsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java deleted file mode 100755 index 197b3373f..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class PutAclResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(PutAclResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java deleted file mode 100755 index 83efb2e8e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class PutSecretResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(PutSecretResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java index 1e507c3d6..bc915bc40 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java @@ -13,7 +13,7 @@ public class RepoAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private RepoPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermission.java index 2ec71c3bf..98d197d3d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermission.java @@ -18,7 +18,7 @@ public class RepoPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private RepoPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescription.java index fce7b9207..99ab8a81c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoPermissionsDescription.java @@ -13,7 +13,7 @@ public class RepoPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private RepoPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java index aa7b08443..f8ebe2792 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java @@ -37,7 +37,7 @@ public void delete(DeleteRepoRequest request) { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - apiClient.execute(req, DeleteRepoResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -119,7 +119,7 @@ public void update(UpdateRepoRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); 
req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, UpdateRepoResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ScopeBackendType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ScopeBackendType.java index e51d3127a..3506c284a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ScopeBackendType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ScopeBackendType.java @@ -4,6 +4,10 @@ import com.databricks.sdk.support.Generated; +/** + * The types of secret scope backends in the Secret Manager. Azure KeyVault backed secret scopes + * will be supported in a later release. + */ @Generated public enum ScopeBackendType { AZURE_KEYVAULT, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadata.java index 2883feb1c..61b3d8e06 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadata.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretMetadata.java @@ -7,6 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * The metadata about a secret. Returned when listing secrets. Does not contain the actual secret + * value. + */ @Generated public class SecretMetadata { /** A unique name to identify the secret. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScope.java index b86511c9d..35c00938d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScope.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretScope.java @@ -7,13 +7,18 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * An organizational resource for storing secrets. Secret scopes can be different types + * (Databricks-managed, Azure KeyVault backed, etc), and ACLs can be applied to control permissions + * for all secrets within a scope. + */ @Generated public class SecretScope { /** The type of secret scope backend. */ @JsonProperty("backend_type") private ScopeBackendType backendType; - /** The metadata for the secret scope if the type is `AZURE_KEYVAULT` */ + /** The metadata for the secret scope if the type is ``AZURE_KEYVAULT`` */ @JsonProperty("keyvault_metadata") private AzureKeyVaultSecretScopeMetadata keyvaultMetadata; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java index f3c1398db..a050fae0a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java @@ -39,8 +39,39 @@ public void createScope(String scope) { } /** - * The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and - * may not exceed 128 characters. + * Creates a new secret scope. + * + *

The scope name must consist of alphanumeric characters, dashes, underscores, and periods, + * and may not exceed 128 characters. + * + *

Example request: + * + *

.. code:: + * + *

{ "scope": "my-simple-databricks-scope", "initial_manage_principal": "users" + * "scope_backend_type": "databricks|azure_keyvault", # below is only required if scope type is + * azure_keyvault "backend_azure_keyvault": { "resource_id": + * "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/xxxx/providers/Microsoft.KeyVault/vaults/xxxx", + * "tenant_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", "dns_name": + * "https://xxxx.vault.azure.net/", } } + * + *

If ``initial_manage_principal`` is specified, the initial ACL applied to the scope is + * applied to the supplied principal (user or group) with ``MANAGE`` permissions. The only + * supported principal for this option is the group ``users``, which contains all users in the + * workspace. If ``initial_manage_principal`` is not specified, the initial ACL with ``MANAGE`` + * permission applied to the scope is assigned to the API request issuer's user identity. + * + *

If ``scope_backend_type`` is ``azure_keyvault``, a secret scope is created with secrets from + * a given Azure KeyVault. The caller must provide the keyvault_resource_id and the tenant_id for + * the key vault. If ``scope_backend_type`` is ``databricks`` or is unspecified, an empty secret + * scope is created and stored in Databricks's own storage. + * + *

Throws ``RESOURCE_ALREADY_EXISTS`` if a scope with the given name already exists. Throws + * ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of scopes in the workspace is exceeded. Throws + * ``INVALID_PARAMETER_VALUE`` if the scope name is invalid. Throws ``BAD_REQUEST`` if request + * violated constraints. Throws ``CUSTOMER_UNAUTHORIZED`` if normal user attempts to create a + * scope with name reserved for databricks internal usage. Throws ``UNAUTHENTICATED`` if unable to + * verify user access permission on Azure KeyVault */ public void createScope(CreateScope request) { impl.createScope(request); @@ -53,9 +84,17 @@ public void deleteAcl(String scope, String principal) { /** * Deletes the given ACL on the given scope. * - *

Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` - * if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does - * not have permission to make this API call. + *

Users must have the ``MANAGE`` permission to invoke this API. + * + *

Example request: + * + *

.. code:: + * + *

{ "scope": "my-secret-scope", "principal": "data-scientists" } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope, principal, or ACL exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. */ public void deleteAcl(DeleteAcl request) { impl.deleteAcl(request); @@ -68,8 +107,15 @@ public void deleteScope(String scope) { /** * Deletes a secret scope. * - *

Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if - * the user does not have permission to make this API call. + *

Example request: + * + *

.. code:: + * + *

{ "scope": "my-secret-scope" } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if the scope does not exist. Throws ``PERMISSION_DENIED`` + * if the user does not have permission to make this API call. Throws ``BAD_REQUEST`` if system + * user attempts to delete internal secret scope. */ public void deleteScope(DeleteScope request) { impl.deleteScope(request); @@ -80,11 +126,19 @@ public void deleteSecret(String scope, String key) { } /** - * Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on - * the secret scope. + * Deletes the secret stored in this secret scope. You must have ``WRITE`` or ``MANAGE`` + * permission on the Secret Scope. + * + *

Example request: * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws - * `PERMISSION_DENIED` if the user does not have permission to make this API call. + *

.. code:: + * + *

{ "scope": "my-secret-scope", "key": "my-secret-key" } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + * ``BAD_REQUEST`` if system user attempts to delete an internal secret, or request is made + * against Azure KeyVault backed scope. */ public void deleteSecret(DeleteSecret request) { impl.deleteSecret(request); @@ -95,11 +149,19 @@ public AclItem getAcl(String scope, String principal) { } /** - * Gets the details about the given ACL, such as the group and permission. Users must have the - * `MANAGE` permission to invoke this API. + * Describes the details about the given ACL, such as the group and permission. + * + *

Users must have the ``MANAGE`` permission to invoke this API. * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` - * if the user does not have permission to make this API call. + *

Example response: + * + *

.. code:: + * + *

{ "principal": "data-scientists", "permission": "READ" } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. */ public AclItem getAcl(GetAclRequest request) { return impl.getAcl(request); @@ -110,15 +172,30 @@ public GetSecretResponse getSecret(String scope, String key) { } /** - * Gets the bytes representation of a secret value for the specified scope and key. + * Gets a secret for a given key and scope. This API can only be called from the DBUtils + * interface. Users need the READ permission to make this call. + * + *

Example response: + * + *

.. code:: * - *

Users need the READ permission to make this call. + *

{ "key": "my-string-key", "value": } * *

Note that the secret value returned is in bytes. The interpretation of the bytes is * determined by the caller in DBUtils and the type the data is decoded into. * - *

Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. - * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. + * + *

Note: This is explicitly an undocumented API. It also doesn't need to be supported for the + * /preview prefix, because it's not a customer-facing API (i.e. only used for DBUtils SecretUtils + * to fetch secrets). + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws + * ``BAD_REQUEST`` if normal user calls get secret outside of a notebook. AKV specific errors: + * Throws ``INVALID_PARAMETER_VALUE`` if secret name is not alphanumeric or too long. Throws + * ``PERMISSION_DENIED`` if secret manager cannot access AKV with 403 error Throws + * ``MALFORMED_REQUEST`` if secret manager cannot access AKV with any other 4xx error */ public GetSecretResponse getSecret(GetSecretRequest request) { return impl.getSecret(request); @@ -129,11 +206,19 @@ public Iterable listAcls(String scope) { } /** - * List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this - * API. + * Lists the ACLs set on the given scope. + * + *

Users must have the ``MANAGE`` permission to invoke this API. + * + *

Example response: + * + *

.. code:: * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` - * if the user does not have permission to make this API call. + *

{ "acls": [{ "principal": "admins", "permission": "MANAGE" },{ "principal": + * "data-scientists", "permission": "READ" }] } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. */ public Iterable listAcls(ListAclsRequest request) { return new Paginator<>(request, impl::listAcls, ListAclsResponse::getItems, response -> null); @@ -142,7 +227,14 @@ public Iterable listAcls(ListAclsRequest request) { /** * Lists all secret scopes available in the workspace. * - *

Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. + *

Example response: + * + *

.. code:: + * + *

{ "scopes": [{ "name": "my-databricks-scope", "backend_type": "DATABRICKS" },{ "name": + * "mount-points", "backend_type": "DATABRICKS" }] } + * + *

Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. */ public Iterable listScopes() { return new Paginator<>( @@ -157,9 +249,17 @@ public Iterable listSecrets(String scope) { * Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret * data cannot be retrieved using this API. Users need the READ permission to make this call. * - *

The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws - * `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the - * user does not have permission to make this API call. + *

Example response: + * + *

.. code:: + * + *

{ "secrets": [ { "key": "my-string-key"", "last_updated_timestamp": "1520467595000" }, { + * "key": "my-byte-key", "last_updated_timestamp": "1520467595000" }, ] } + * + *

The lastUpdatedTimestamp returned is in milliseconds since epoch. + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. */ public Iterable listSecrets(ListSecretsRequest request) { return new Paginator<>( @@ -171,30 +271,34 @@ public void putAcl(String scope, String principal, AclPermission permission) { } /** - * Creates or overwrites the Access Control List (ACL) associated with the given principal (user - * or group) on the specified scope point. - * - *

In general, a user or group will use the most powerful permission available to them, and - * permissions are ordered as follows: + * Creates or overwrites the ACL associated with the given principal (user or group) on the + * specified scope point. In general, a user or group will use the most powerful permission + * available to them, and permissions are ordered as follows: * - *

* `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - - * Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope - * and list what secrets are available. + *

* ``MANAGE`` - Allowed to change ACLs, and read and write to this secret scope. * ``WRITE`` + * - Allowed to read and write to this secret scope. * ``READ`` - Allowed to read this secret + * scope and list what secrets are available. * *

Note that in general, secret values can only be read from within a command on a cluster (for * example, through a notebook). There is no API to read the actual secret value material outside * of a cluster. However, the user's permission will be applied based on who is executing the * command, and they must have at least READ permission. * - *

Users must have the `MANAGE` permission to invoke this API. + *

Users must have the ``MANAGE`` permission to invoke this API. + * + *

Example request: + * + *

.. code:: + * + *

{ "scope": "my-secret-scope", "principal": "data-scientists", "permission": "READ" } * *

The principal is a user or group name corresponding to an existing Databricks principal to * be granted or revoked access. * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws - * `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws - * `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED` - * if the user does not have permission to make this API call. + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``RESOURCE_ALREADY_EXISTS`` if a permission for the principal already exists. Throws + * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. */ public void putAcl(PutAcl request) { impl.putAcl(request); @@ -207,20 +311,28 @@ public void putSecret(String scope, String key) { /** * Inserts a secret under the provided scope with the given name. If a secret already exists with * the same name, this command overwrites the existing secret's value. The server encrypts the - * secret using the secret scope's encryption settings before storing it. + * secret using the secret scope's encryption settings before storing it. You must have ``WRITE`` + * or ``MANAGE`` permission on the secret scope. + * + *

The secret key must consist of alphanumeric characters, dashes, underscores, and periods, + * and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum + * number of secrets in a given scope is 1000. + * + *

Example request: + * + *

.. code:: * - *

You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must - * consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 - * characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a - * given scope is 1000. + *

{ "scope": "my-databricks-scope", "key": "my-string-key", "string_value": "foobar" } * *

The input fields "string_value" or "bytes_value" specify the type of the secret, which will * determine the value returned when the secret value is requested. Exactly one must be specified. * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws - * `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is exceeded. Throws - * `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws - * `PERMISSION_DENIED` if the user does not have permission to make this API call. + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of secrets in scope is exceeded. Throws + * ``INVALID_PARAMETER_VALUE`` if the request parameters are invalid. Throws ``PERMISSION_DENIED`` + * if the user does not have permission to make this API call. Throws ``MALFORMED_REQUEST`` if + * request is incorrectly formatted or conflicting. Throws ``BAD_REQUEST`` if request is made + * against Azure KeyVault backed scope. */ public void putSecret(PutSecret request) { impl.putSecret(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java index 78a15a19c..80e812a95 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java @@ -22,9 +22,8 @@ public void createScope(CreateScope request) { try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, CreateScopeResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -36,9 +35,8 @@ public void deleteAcl(DeleteAcl request) { try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteAclResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -50,9 +48,8 @@ public void deleteScope(DeleteScope request) { try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteScopeResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -66,7 +63,7 @@ public void deleteSecret(DeleteSecret request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteSecretResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -142,9 +139,8 @@ public void putAcl(PutAcl request) { try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PutAclResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -156,9 +152,8 @@ public void putSecret(PutSecret request) { try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, PutSecretResponse.class); + apiClient.execute(req, Void.class); } 
catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java index f8de5787c..d189d8f7e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java @@ -21,72 +21,164 @@ @Generated public interface SecretsService { /** - * The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and - * may not exceed 128 characters. + * Creates a new secret scope. + * + *

The scope name must consist of alphanumeric characters, dashes, underscores, and periods, + * and may not exceed 128 characters. + * + *

Example request: + * + *

.. code:: + * + *

{ "scope": "my-simple-databricks-scope", "initial_manage_principal": "users" + * "scope_backend_type": "databricks|azure_keyvault", # below is only required if scope type is + * azure_keyvault "backend_azure_keyvault": { "resource_id": + * "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/xxxx/providers/Microsoft.KeyVault/vaults/xxxx", + * "tenant_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", "dns_name": + * "https://xxxx.vault.azure.net/", } } + * + *

If ``initial_manage_principal`` is specified, the initial ACL applied to the scope is + * applied to the supplied principal (user or group) with ``MANAGE`` permissions. The only + * supported principal for this option is the group ``users``, which contains all users in the + * workspace. If ``initial_manage_principal`` is not specified, the initial ACL with ``MANAGE`` + * permission applied to the scope is assigned to the API request issuer's user identity. + * + *

If ``scope_backend_type`` is ``azure_keyvault``, a secret scope is created with secrets from + * a given Azure KeyVault. The caller must provide the keyvault_resource_id and the tenant_id for + * the key vault. If ``scope_backend_type`` is ``databricks`` or is unspecified, an empty secret + * scope is created and stored in Databricks's own storage. + * + *

Throws ``RESOURCE_ALREADY_EXISTS`` if a scope with the given name already exists. Throws + * ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of scopes in the workspace is exceeded. Throws + * ``INVALID_PARAMETER_VALUE`` if the scope name is invalid. Throws ``BAD_REQUEST`` if request + * violated constraints. Throws ``CUSTOMER_UNAUTHORIZED`` if normal user attempts to create a + * scope with name reserved for databricks internal usage. Throws ``UNAUTHENTICATED`` if unable to + * verify user access permission on Azure KeyVault */ void createScope(CreateScope createScope); /** * Deletes the given ACL on the given scope. * - *

Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` - * if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does - * not have permission to make this API call. + *

Users must have the ``MANAGE`` permission to invoke this API. + * + *

Example request: + * + *

.. code:: + * + *

{ "scope": "my-secret-scope", "principal": "data-scientists" } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope, principal, or ACL exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. */ void deleteAcl(DeleteAcl deleteAcl); /** * Deletes a secret scope. * - *

Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if - * the user does not have permission to make this API call. + *

Example request: + * + *

.. code:: + * + *

{ "scope": "my-secret-scope" } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if the scope does not exist. Throws ``PERMISSION_DENIED`` + * if the user does not have permission to make this API call. Throws ``BAD_REQUEST`` if system + * user attempts to delete internal secret scope. */ void deleteScope(DeleteScope deleteScope); /** - * Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on - * the secret scope. + * Deletes the secret stored in this secret scope. You must have ``WRITE`` or ``MANAGE`` + * permission on the Secret Scope. + * + *

Example request: * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws - * `PERMISSION_DENIED` if the user does not have permission to make this API call. + *

.. code:: + * + *

{ "scope": "my-secret-scope", "key": "my-secret-key" } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + * ``BAD_REQUEST`` if system user attempts to delete an internal secret, or request is made + * against Azure KeyVault backed scope. */ void deleteSecret(DeleteSecret deleteSecret); /** - * Gets the details about the given ACL, such as the group and permission. Users must have the - * `MANAGE` permission to invoke this API. + * Describes the details about the given ACL, such as the group and permission. + * + *

Users must have the ``MANAGE`` permission to invoke this API. * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` - * if the user does not have permission to make this API call. + *

Example response: + * + *

.. code:: + * + *

{ "principal": "data-scientists", "permission": "READ" } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. */ AclItem getAcl(GetAclRequest getAclRequest); /** - * Gets the bytes representation of a secret value for the specified scope and key. + * Gets a secret for a given key and scope. This API can only be called from the DBUtils + * interface. Users need the READ permission to make this call. + * + *

Example response: + * + *

.. code:: * - *

Users need the READ permission to make this call. + *

{ "key": "my-string-key", "value": } * *

Note that the secret value returned is in bytes. The interpretation of the bytes is * determined by the caller in DBUtils and the type the data is decoded into. * - *

Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. - * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. + * + *

Note: This is explicitly an undocumented API. It also doesn't need to be supported for the + * /preview prefix, because it's not a customer-facing API (i.e. only used for DBUtils SecretUtils + * to fetch secrets). + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws + * ``BAD_REQUEST`` if normal user calls get secret outside of a notebook. AKV specific errors: + * Throws ``INVALID_PARAMETER_VALUE`` if secret name is not alphanumeric or too long. Throws + * ``PERMISSION_DENIED`` if secret manager cannot access AKV with 403 error Throws + * ``MALFORMED_REQUEST`` if secret manager cannot access AKV with any other 4xx error */ GetSecretResponse getSecret(GetSecretRequest getSecretRequest); /** - * List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this - * API. + * Lists the ACLs set on the given scope. + * + *

Users must have the ``MANAGE`` permission to invoke this API. + * + *

Example response: + * + *

.. code:: * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` - * if the user does not have permission to make this API call. + *

{ "acls": [{ "principal": "admins", "permission": "MANAGE" },{ "principal": + * "data-scientists", "permission": "READ" }] } + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. */ ListAclsResponse listAcls(ListAclsRequest listAclsRequest); /** * Lists all secret scopes available in the workspace. * - *

Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. + *

Example response: + * + *

.. code:: + * + *

{ "scopes": [{ "name": "my-databricks-scope", "backend_type": "DATABRICKS" },{ "name": + * "mount-points", "backend_type": "DATABRICKS" }] } + * + *

Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. */ ListScopesResponse listScopes(); @@ -94,57 +186,77 @@ public interface SecretsService { * Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret * data cannot be retrieved using this API. Users need the READ permission to make this call. * - *

The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws - * `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the - * user does not have permission to make this API call. + *

Example response: + * + *

.. code:: + * + *

{ "secrets": [ { "key": "my-string-key"", "last_updated_timestamp": "1520467595000" }, { + * "key": "my-byte-key", "last_updated_timestamp": "1520467595000" }, ] } + * + *

The lastUpdatedTimestamp returned is in milliseconds since epoch. + * + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. */ ListSecretsResponse listSecrets(ListSecretsRequest listSecretsRequest); /** - * Creates or overwrites the Access Control List (ACL) associated with the given principal (user - * or group) on the specified scope point. - * - *

In general, a user or group will use the most powerful permission available to them, and - * permissions are ordered as follows: + * Creates or overwrites the ACL associated with the given principal (user or group) on the + * specified scope point. In general, a user or group will use the most powerful permission + * available to them, and permissions are ordered as follows: * - *

* `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - - * Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope - * and list what secrets are available. + *

* ``MANAGE`` - Allowed to change ACLs, and read and write to this secret scope. * ``WRITE`` + * - Allowed to read and write to this secret scope. * ``READ`` - Allowed to read this secret + * scope and list what secrets are available. * *

Note that in general, secret values can only be read from within a command on a cluster (for * example, through a notebook). There is no API to read the actual secret value material outside * of a cluster. However, the user's permission will be applied based on who is executing the * command, and they must have at least READ permission. * - *

Users must have the `MANAGE` permission to invoke this API. + *

Users must have the ``MANAGE`` permission to invoke this API. + * + *

Example request: + * + *

.. code:: + * + *

{ "scope": "my-secret-scope", "principal": "data-scientists", "permission": "READ" } * *

The principal is a user or group name corresponding to an existing Databricks principal to * be granted or revoked access. * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws - * `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws - * `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED` - * if the user does not have permission to make this API call. + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``RESOURCE_ALREADY_EXISTS`` if a permission for the principal already exists. Throws + * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. Throws + * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. */ void putAcl(PutAcl putAcl); /** * Inserts a secret under the provided scope with the given name. If a secret already exists with * the same name, this command overwrites the existing secret's value. The server encrypts the - * secret using the secret scope's encryption settings before storing it. + * secret using the secret scope's encryption settings before storing it. You must have ``WRITE`` + * or ``MANAGE`` permission on the secret scope. + * + *

The secret key must consist of alphanumeric characters, dashes, underscores, and periods, + * and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum + * number of secrets in a given scope is 1000. + * + *

Example request: + * + *

.. code:: * - *

You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must - * consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 - * characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a - * given scope is 1000. + *

{ "scope": "my-databricks-scope", "key": "my-string-key", "string_value": "foobar" } * *

The input fields "string_value" or "bytes_value" specify the type of the secret, which will * determine the value returned when the secret value is requested. Exactly one must be specified. * - *

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws - * `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is exceeded. Throws - * `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws - * `PERMISSION_DENIED` if the user does not have permission to make this API call. + *

Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws + * ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of secrets in scope is exceeded. Throws + * ``INVALID_PARAMETER_VALUE`` if the request parameters are invalid. Throws ``PERMISSION_DENIED`` + * if the user does not have permission to make this API call. Throws ``MALFORMED_REQUEST`` if + * request is incorrectly formatted or conflicting. Throws ``BAD_REQUEST`` if request is made + * against Azure KeyVault backed scope. */ void putSecret(PutSecret putSecret); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java deleted file mode 100755 index 20e001bd3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateCredentialsResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateCredentialsResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java deleted file mode 100755 index c7d596164..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.workspace; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -@Generated -public class UpdateRepoResponse { - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hash(); - } - - @Override - public String toString() { - return new ToStringer(UpdateRepoResponse.class).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java index 477ceb249..adb7adb44 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java @@ -24,7 +24,7 @@ public void delete(Delete request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, DeleteResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -97,7 +97,7 @@ public void importContent(Import request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ImportResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -124,7 +124,7 @@ public void mkdirs(Mkdirs request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, MkdirsResponse.class); + apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java index edb8f3b3e..de751831a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java @@ -13,7 +13,7 @@ public class WorkspaceObjectAccessControlRequest { @JsonProperty("group_name") private String groupName; - /** Permission level */ + /** */ @JsonProperty("permission_level") private WorkspaceObjectPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java index 748bd2854..0fb785b84 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java @@ -18,7 +18,7 @@ public class WorkspaceObjectPermission { @JsonProperty("inherited_from_object") private Collection inheritedFromObject; - /** Permission level */ + /** */ @JsonProperty("permission_level") private 
WorkspaceObjectPermissionLevel permissionLevel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java index 31b42d41f..494dd286c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java @@ -13,7 +13,7 @@ public class WorkspaceObjectPermissionsDescription { @JsonProperty("description") private String description; - /** Permission level */ + /** */ @JsonProperty("permission_level") private WorkspaceObjectPermissionLevel permissionLevel;
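
The expanded Javadoc above walks through the write path of the Secrets API: scope creation, inserting a secret, granting an ACL, and the metadata-only listings. As a minimal illustrative sketch (not part of the diff), the following shows how those calls are typically driven from client code. It assumes the SDK's usual WorkspaceClient entry point with environment-based authentication and the fluent setters on the generated request classes (CreateScope, PutSecret, PutAcl); the class name SecretsExample is hypothetical, and the scope, key, and principal names are taken from the Javadoc examples.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.workspace.AclItem;
import com.databricks.sdk.service.workspace.AclPermission;
import com.databricks.sdk.service.workspace.CreateScope;
import com.databricks.sdk.service.workspace.PutAcl;
import com.databricks.sdk.service.workspace.PutSecret;
import com.databricks.sdk.service.workspace.SecretMetadata;

public class SecretsExample {
  public static void main(String[] args) {
    // Resolves host and credentials from the environment (e.g. DATABRICKS_HOST / DATABRICKS_TOKEN).
    WorkspaceClient w = new WorkspaceClient();

    // Create a Databricks-backed scope; names are limited to 128 characters of
    // alphanumerics, dashes, underscores, and periods.
    w.secrets().createScope(new CreateScope()
        .setScope("my-simple-databricks-scope")
        .setInitialManagePrincipal("users"));

    // Insert (or overwrite) a secret; exactly one of string_value / bytes_value may be set.
    w.secrets().putSecret(new PutSecret()
        .setScope("my-simple-databricks-scope")
        .setKey("my-string-key")
        .setStringValue("foobar"));

    // Grant READ on the scope to an existing principal; permissions order as MANAGE > WRITE > READ.
    w.secrets().putAcl(new PutAcl()
        .setScope("my-simple-databricks-scope")
        .setPrincipal("data-scientists")
        .setPermission(AclPermission.READ));

    // Metadata-only listing: keys and lastUpdatedTimestamp (milliseconds since epoch), never values.
    for (SecretMetadata m : w.secrets().listSecrets("my-simple-databricks-scope")) {
      System.out.println(m.getKey() + " updated at " + m.getLastUpdatedTimestamp());
    }

    // ACLs currently set on the scope.
    for (AclItem acl : w.secrets().listAcls("my-simple-databricks-scope")) {
      System.out.println(acl.getPrincipal() + " -> " + acl.getPermission());
    }
  }
}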
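
For the inspection and cleanup path, a companion sketch using the convenience overloads whose signatures appear in this diff (getAcl(String, String), deleteAcl(String, String), deleteSecret(String, String), deleteScope(String)). It assumes the same WorkspaceClient wiring as above and that the Throws codes listed in the Javadoc surface as runtime DatabricksException errors, as suggested by the Impl classes in this diff; SecretsCleanupExample is a hypothetical class name.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.service.workspace.AclItem;

public class SecretsCleanupExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String scope = "my-simple-databricks-scope";

    try {
      // MANAGE permission on the scope is required to inspect ACLs.
      AclItem acl = w.secrets().getAcl(scope, "data-scientists");
      System.out.println("data-scientists has " + acl.getPermission());

      // Remove the ACL, the secret, and finally the scope itself.
      w.secrets().deleteAcl(scope, "data-scientists");
      w.secrets().deleteSecret(scope, "my-string-key");
      w.secrets().deleteScope(scope);
    } catch (DatabricksException e) {
      // RESOURCE_DOES_NOT_EXIST, PERMISSION_DENIED, etc. arrive as unchecked exceptions.
      System.err.println("Secrets API call failed: " + e.getMessage());
    }
  }
}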