diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 12c62ec0b..42f35bf7e 100755
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-86481d2fa23e3fb65128ea34b045fe585f7643f1
\ No newline at end of file
+125e8a088be77ed7aae64f07daf977c2c4fa8153
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 2510002e8..5eeb280fb 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -141,6 +141,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/SpaceUpdateSta
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/SpaceUpdateStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/TelemetryExportDestination.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UnityCatalog.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateCustomTemplateRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateSpaceOperation.java linguist-generated=true
@@ -976,14 +978,22 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboar
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardView.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/EvaluationStatusType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationSummary.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateEvalRunRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationMessageRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalAssessment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResponseType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResult.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResultDetails.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalRunResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java linguist-generated=true
@@ -993,6 +1003,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGen
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetEvalResultDetailsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetEvalRunRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true
@@ -1003,6 +1015,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieLis
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationMessagesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java linguist-generated=true
@@ -1045,6 +1061,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAtt
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ScoreReason.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscriber.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java linguist-generated=true
@@ -1115,6 +1132,17 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTabl
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfigAutoTaggingMode.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfigSchemaNames.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CreateCatalogConfigRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DeleteCatalogConfigRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/GetCatalogConfigRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/UpdateCatalogConfigRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AggregationGranularity.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java linguist-generated=true
@@ -1548,6 +1576,29 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewsToExport.
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WidgetErrorDetail.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/SyncKnowledgeSourcesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeAssistantRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeSourceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java linguist-generated=true
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
old mode 100644
new mode 100755
index e780e8ee3..9c54a0919
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -13,3 +13,8 @@
### Internal Changes
### API Changes
+* Add `com.databricks.sdk.service.dataclassification` and `com.databricks.sdk.service.knowledgeassistants` packages.
+* Add `workspaceClient.dataClassification()` service.
+* Add `workspaceClient.knowledgeAssistants()` service.
+* Add `genieCreateEvalRun()`, `genieGetEvalResultDetails()`, `genieGetEvalRun()`, `genieListEvalResults()` and `genieListEvalRuns()` methods for `workspaceClient.genie()` service.
+* Add `telemetryExportDestinations` field for `com.databricks.sdk.service.apps.App`.
\ No newline at end of file
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index f20ce2df3..28a9ee5d7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -353,15 +353,15 @@ public AccountIpAccessListsAPI ipAccessLists() {
/**
* These APIs manage log delivery configurations for this account. The two supported log types for
- * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
- * feature works with all account ID types.
+ * this API are _billable usage logs_ (AWS only) and _audit logs_ (AWS and GCP). This feature is
+ * in Public Preview. This feature works with all account ID types.
*
*
Log delivery works with all account types. However, if your account is on the E2 version of
* the platform or on a select custom plan that allows multiple workspaces per account, you can
* optionally configure different storage destinations for each workspace. Log delivery status is
* also provided to know the latest status of log delivery attempts.
*
- *
The high-level flow of billable usage delivery:
+ *
The high-level flow of billable usage delivery (AWS only):
*
*
1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
* Using Databricks APIs, call the Account API to create a [storage configuration
@@ -381,35 +381,34 @@ public AccountIpAccessListsAPI ipAccessLists() {
* solely delivers logs related to the specified workspaces. You can create multiple types of
* delivery configurations per account.
*
- *
For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * `//billable-usage/csv/`, where `` is the name of the optional
- * delivery path prefix you set up during log delivery configuration. Files are named
+ * For billable usage delivery (AWS only): * For more information about billable usage logs,
+ * see [Billable usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery
+ * location is `//billable-usage/csv/`, where `` is the name of the
+ * optional delivery path prefix you set up during log delivery configuration. Files are named
* `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to
* specific workspaces (_workspace level_ logs). You can aggregate usage for your entire account
* by creating an _account level_ delivery configuration that delivers logs for all current and
* future workspaces in your account. * The files are delivered daily by overwriting the month's
* CSV file for each workspace.
*
- * For audit log delivery: * For more information about about audit log delivery, see [Audit
- * log delivery], which includes information about the used JSON schema. * The delivery location
- * is
+ *
For audit log delivery (AWS and GCP): * For more information about audit log delivery,
+ * see Audit log delivery [AWS] or [GCP], which includes information about the used JSON schema. *
+ * The delivery location is
* `//workspaceId=/date=/auditlogs_.json`.
* Files may get overwritten with the same content multiple times to achieve exactly-once
* delivery. * If the audit log delivery configuration included specific workspace IDs, only
* _workspace-level_ audit logs for those workspaces are delivered. If the log delivery
* configuration applies to the entire account (_account level_ delivery configuration), the audit
* log delivery includes workspace-level audit logs for all workspaces in the account as well as
- * account-level audit logs. See [Audit log delivery] for details. * Auditable events are
- * typically available in logs within 15 minutes.
+ * account-level audit logs. See Audit log delivery [AWS] or [GCP] for details. * Auditable events
+ * are typically available in logs within 15 minutes.
*
- * [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable
- * usage log delivery]:
+ *
[AWS]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+ * [Billable usage log delivery]:
* https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * [GCP]: https://docs.databricks.com/gcp/en/admin/account-settings/audit-logs [Usage page]:
+ * https://docs.databricks.com/administration-guide/account-settings/usage.html [create a new AWS
+ * S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*/
public LogDeliveryAPI logDelivery() {
return logDeliveryAPI;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 5b71b785e..fae5f8e85 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -103,6 +103,8 @@
import com.databricks.sdk.service.dashboards.LakeviewService;
import com.databricks.sdk.service.database.DatabaseAPI;
import com.databricks.sdk.service.database.DatabaseService;
+import com.databricks.sdk.service.dataclassification.DataClassificationAPI;
+import com.databricks.sdk.service.dataclassification.DataClassificationService;
import com.databricks.sdk.service.dataquality.DataQualityAPI;
import com.databricks.sdk.service.dataquality.DataQualityService;
import com.databricks.sdk.service.files.DbfsService;
@@ -136,6 +138,8 @@
import com.databricks.sdk.service.jobs.JobsService;
import com.databricks.sdk.service.jobs.PolicyComplianceForJobsAPI;
import com.databricks.sdk.service.jobs.PolicyComplianceForJobsService;
+import com.databricks.sdk.service.knowledgeassistants.KnowledgeAssistantsAPI;
+import com.databricks.sdk.service.knowledgeassistants.KnowledgeAssistantsService;
import com.databricks.sdk.service.marketplace.ConsumerFulfillmentsAPI;
import com.databricks.sdk.service.marketplace.ConsumerFulfillmentsService;
import com.databricks.sdk.service.marketplace.ConsumerInstallationsAPI;
@@ -292,6 +296,7 @@ public class WorkspaceClient {
private CurrentUserAPI currentUserAPI;
private DashboardWidgetsAPI dashboardWidgetsAPI;
private DashboardsAPI dashboardsAPI;
+ private DataClassificationAPI dataClassificationAPI;
private DataQualityAPI dataQualityAPI;
private DataSourcesAPI dataSourcesAPI;
private DatabaseAPI databaseAPI;
@@ -316,6 +321,7 @@ public class WorkspaceClient {
private InstanceProfilesAPI instanceProfilesAPI;
private IpAccessListsAPI ipAccessListsAPI;
private JobsAPI jobsAPI;
+ private KnowledgeAssistantsAPI knowledgeAssistantsAPI;
private LakeviewAPI lakeviewAPI;
private LakeviewEmbeddedAPI lakeviewEmbeddedAPI;
private LibrariesAPI librariesAPI;
@@ -425,6 +431,7 @@ public WorkspaceClient(DatabricksConfig config) {
currentUserAPI = new CurrentUserAPI(apiClient);
dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient);
dashboardsAPI = new DashboardsAPI(apiClient);
+ dataClassificationAPI = new DataClassificationAPI(apiClient);
dataQualityAPI = new DataQualityAPI(apiClient);
dataSourcesAPI = new DataSourcesAPI(apiClient);
databaseAPI = new DatabaseAPI(apiClient);
@@ -449,6 +456,7 @@ public WorkspaceClient(DatabricksConfig config) {
instanceProfilesAPI = new InstanceProfilesAPI(apiClient);
ipAccessListsAPI = new IpAccessListsAPI(apiClient);
jobsAPI = new JobsAPI(apiClient);
+ knowledgeAssistantsAPI = new KnowledgeAssistantsAPI(apiClient);
lakeviewAPI = new LakeviewAPI(apiClient);
lakeviewEmbeddedAPI = new LakeviewEmbeddedAPI(apiClient);
librariesAPI = new LibrariesAPI(apiClient);
@@ -818,6 +826,15 @@ public DashboardsAPI dashboards() {
return dashboardsAPI;
}
+ /**
+ * Manage data classification for Unity Catalog catalogs. Data classification automatically
+ * identifies and tags sensitive data (PII) in Unity Catalog tables. Each catalog can have at most
+ * one configuration resource that controls scanning behavior and auto-tagging rules.
+ */
+ public DataClassificationAPI dataClassification() {
+ return dataClassificationAPI;
+ }
+
/** Manage the data quality of Unity Catalog objects (currently support `schema` and `table`) */
public DataQualityAPI dataQuality() {
return dataQualityAPI;
@@ -1146,6 +1163,11 @@ public JobsAPI jobs() {
return jobsAPI;
}
+ /** Manage Knowledge Assistants and related resources. */
+ public KnowledgeAssistantsAPI knowledgeAssistants() {
+ return knowledgeAssistantsAPI;
+ }
+
/**
* These APIs provide specific management operations for Lakeview dashboards. Generic resource
* management can be done with Workspace API (import, export, get-status, list, delete).
@@ -2463,6 +2485,17 @@ public WorkspaceClient withDashboardsAPI(DashboardsAPI dashboards) {
return this;
}
+ /** Replace the default DataClassificationService with a custom implementation. */
+ public WorkspaceClient withDataClassificationImpl(DataClassificationService dataClassification) {
+ return this.withDataClassificationAPI(new DataClassificationAPI(dataClassification));
+ }
+
+ /** Replace the default DataClassificationAPI with a custom implementation. */
+ public WorkspaceClient withDataClassificationAPI(DataClassificationAPI dataClassification) {
+ this.dataClassificationAPI = dataClassification;
+ return this;
+ }
+
/** Replace the default DataQualityService with a custom implementation. */
public WorkspaceClient withDataQualityImpl(DataQualityService dataQuality) {
return this.withDataQualityAPI(new DataQualityAPI(dataQuality));
@@ -2728,6 +2761,18 @@ public WorkspaceClient withJobsAPI(JobsAPI jobs) {
return this;
}
+ /** Replace the default KnowledgeAssistantsService with a custom implementation. */
+ public WorkspaceClient withKnowledgeAssistantsImpl(
+ KnowledgeAssistantsService knowledgeAssistants) {
+ return this.withKnowledgeAssistantsAPI(new KnowledgeAssistantsAPI(knowledgeAssistants));
+ }
+
+ /** Replace the default KnowledgeAssistantsAPI with a custom implementation. */
+ public WorkspaceClient withKnowledgeAssistantsAPI(KnowledgeAssistantsAPI knowledgeAssistants) {
+ this.knowledgeAssistantsAPI = knowledgeAssistants;
+ return this;
+ }
+
/** Replace the default LakeviewService with a custom implementation. */
public WorkspaceClient withLakeviewImpl(LakeviewService lakeview) {
return this.withLakeviewAPI(new LakeviewAPI(lakeview));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
index ef8f2a021..4d3757f2f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
@@ -117,6 +117,10 @@ public class App {
@JsonProperty("space")
private String space;
+ /** */
+ @JsonProperty("telemetry_export_destinations")
+ private Collection telemetryExportDestinations;
+
/** The update time of the app. Formatted timestamp in ISO 6801. */
@JsonProperty("update_time")
private String updateTime;
@@ -344,6 +348,16 @@ public String getSpace() {
return space;
}
+ public App setTelemetryExportDestinations(
+ Collection telemetryExportDestinations) {
+ this.telemetryExportDestinations = telemetryExportDestinations;
+ return this;
+ }
+
+ public Collection getTelemetryExportDestinations() {
+ return telemetryExportDestinations;
+ }
+
public App setUpdateTime(String updateTime) {
this.updateTime = updateTime;
return this;
@@ -417,6 +431,7 @@ public boolean equals(Object o) {
&& Objects.equals(servicePrincipalId, that.servicePrincipalId)
&& Objects.equals(servicePrincipalName, that.servicePrincipalName)
&& Objects.equals(space, that.space)
+ && Objects.equals(telemetryExportDestinations, that.telemetryExportDestinations)
&& Objects.equals(updateTime, that.updateTime)
&& Objects.equals(updater, that.updater)
&& Objects.equals(url, that.url)
@@ -450,6 +465,7 @@ public int hashCode() {
servicePrincipalId,
servicePrincipalName,
space,
+ telemetryExportDestinations,
updateTime,
updater,
url,
@@ -483,6 +499,7 @@ public String toString() {
.add("servicePrincipalId", servicePrincipalId)
.add("servicePrincipalName", servicePrincipalName)
.add("space", space)
+ .add("telemetryExportDestinations", telemetryExportDestinations)
.add("updateTime", updateTime)
.add("updater", updater)
.add("url", url)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/TelemetryExportDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/TelemetryExportDestination.java
new file mode 100755
index 000000000..962a7b3bb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/TelemetryExportDestination.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** A single telemetry export destination with its configuration and status. */
+@Generated
+public class TelemetryExportDestination {
+ /** */
+ @JsonProperty("unity_catalog")
+ private UnityCatalog unityCatalog;
+
+ public TelemetryExportDestination setUnityCatalog(UnityCatalog unityCatalog) {
+ this.unityCatalog = unityCatalog;
+ return this;
+ }
+
+ public UnityCatalog getUnityCatalog() {
+ return unityCatalog;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TelemetryExportDestination that = (TelemetryExportDestination) o;
+ return Objects.equals(unityCatalog, that.unityCatalog);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(unityCatalog);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(TelemetryExportDestination.class)
+ .add("unityCatalog", unityCatalog)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UnityCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UnityCatalog.java
new file mode 100755
index 000000000..314f79b04
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UnityCatalog.java
@@ -0,0 +1,75 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Unity Catalog Destinations for OTEL telemetry export. */
+@Generated
+public class UnityCatalog {
+ /** Unity Catalog table for OTEL logs. */
+ @JsonProperty("logs_table")
+ private String logsTable;
+
+ /** Unity Catalog table for OTEL metrics. */
+ @JsonProperty("metrics_table")
+ private String metricsTable;
+
+ /** Unity Catalog table for OTEL traces (spans). */
+ @JsonProperty("traces_table")
+ private String tracesTable;
+
+ public UnityCatalog setLogsTable(String logsTable) {
+ this.logsTable = logsTable;
+ return this;
+ }
+
+ public String getLogsTable() {
+ return logsTable;
+ }
+
+ public UnityCatalog setMetricsTable(String metricsTable) {
+ this.metricsTable = metricsTable;
+ return this;
+ }
+
+ public String getMetricsTable() {
+ return metricsTable;
+ }
+
+ public UnityCatalog setTracesTable(String tracesTable) {
+ this.tracesTable = tracesTable;
+ return this;
+ }
+
+ public String getTracesTable() {
+ return tracesTable;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UnityCatalog that = (UnityCatalog) o;
+ return Objects.equals(logsTable, that.logsTable)
+ && Objects.equals(metricsTable, that.metricsTable)
+ && Objects.equals(tracesTable, that.tracesTable);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(logsTable, metricsTable, tracesTable);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UnityCatalog.class)
+ .add("logsTable", logsTable)
+ .add("metricsTable", metricsTable)
+ .add("tracesTable", tracesTable)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
index 92853f21b..aa9a56bb8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
@@ -9,15 +9,15 @@
/**
* These APIs manage log delivery configurations for this account. The two supported log types for
- * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
- * feature works with all account ID types.
+ * this API are _billable usage logs_ (AWS only) and _audit logs_ (AWS and GCP). This feature is in
+ * Public Preview. This feature works with all account ID types.
*
* Log delivery works with all account types. However, if your account is on the E2 version of
* the platform or on a select custom plan that allows multiple workspaces per account, you can
* optionally configure different storage destinations for each workspace. Log delivery status is
* also provided to know the latest status of log delivery attempts.
*
- *
The high-level flow of billable usage delivery:
+ *
The high-level flow of billable usage delivery (AWS only):
*
*
1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
* Using Databricks APIs, call the Account API to create a [storage configuration
@@ -37,9 +37,9 @@
* logs related to the specified workspaces. You can create multiple types of delivery
* configurations per account.
*
- *
For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * `//billable-usage/csv/`, where `` is the name of the optional
+ * For billable usage delivery (AWS only): * For more information about billable usage logs, see
+ * [Billable usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location
+ * is `//billable-usage/csv/`, where `` is the name of the optional
* delivery path prefix you set up during log delivery configuration. Files are named
* `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to specific
* workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating
@@ -47,24 +47,24 @@
* workspaces in your account. * The files are delivered daily by overwriting the month's CSV file
* for each workspace.
*
- * For audit log delivery: * For more information about about audit log delivery, see [Audit log
- * delivery], which includes information about the used JSON schema. * The delivery location is
+ *
For audit log delivery (AWS and GCP): * For more information about audit log delivery,
+ * see Audit log delivery [AWS] or [GCP], which includes information about the used JSON schema. *
+ * The delivery location is
* `//workspaceId=/date=/auditlogs_.json`.
* Files may get overwritten with the same content multiple times to achieve exactly-once delivery.
* * If the audit log delivery configuration included specific workspace IDs, only _workspace-level_
* audit logs for those workspaces are delivered. If the log delivery configuration applies to the
* entire account (_account level_ delivery configuration), the audit log delivery includes
* workspace-level audit logs for all workspaces in the account as well as account-level audit logs.
- * See [Audit log delivery] for details. * Auditable events are typically available in logs within
- * 15 minutes.
+ * See Audit log delivery [AWS] or [GCP] for details. * Auditable events are typically available in
+ * logs within 15 minutes.
*
- * [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage
- * log delivery]:
+ *
[AWS]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+ * [Billable usage log delivery]:
* https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * [GCP]: https://docs.databricks.com/gcp/en/admin/account-settings/audit-logs [Usage page]:
+ * https://docs.databricks.com/administration-guide/account-settings/usage.html [create a new AWS S3
+ * bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*/
@Generated
public class LogDeliveryAPI {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
index 9c4796014..1470c7ad2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
@@ -5,15 +5,15 @@
/**
* These APIs manage log delivery configurations for this account. The two supported log types for
- * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
- * feature works with all account ID types.
+ * this API are _billable usage logs_ (AWS only) and _audit logs_ (AWS and GCP). This feature is in
+ * Public Preview. This feature works with all account ID types.
*
*
Log delivery works with all account types. However, if your account is on the E2 version of
* the platform or on a select custom plan that allows multiple workspaces per account, you can
* optionally configure different storage destinations for each workspace. Log delivery status is
* also provided to know the latest status of log delivery attempts.
*
- *
The high-level flow of billable usage delivery:
+ *
The high-level flow of billable usage delivery (AWS only):
*
*
1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
* Using Databricks APIs, call the Account API to create a [storage configuration
@@ -33,9 +33,9 @@
* logs related to the specified workspaces. You can create multiple types of delivery
* configurations per account.
*
- *
For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * `//billable-usage/csv/`, where `` is the name of the optional
+ * For billable usage delivery (AWS only): * For more information about billable usage logs, see
+ * [Billable usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location
+ * is `//billable-usage/csv/`, where `` is the name of the optional
* delivery path prefix you set up during log delivery configuration. Files are named
* `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to specific
* workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating
@@ -43,24 +43,24 @@
* workspaces in your account. * The files are delivered daily by overwriting the month's CSV file
* for each workspace.
*
- * For audit log delivery: * For more information about about audit log delivery, see [Audit log
- * delivery], which includes information about the used JSON schema. * The delivery location is
+ *
For audit log delivery (AWS and GCP): * For more information about audit log delivery,
+ * see Audit log delivery [AWS] or [GCP], which includes information about the used JSON schema. *
+ * The delivery location is
* `//workspaceId=/date=/auditlogs_.json`.
* Files may get overwritten with the same content multiple times to achieve exactly-once delivery.
* * If the audit log delivery configuration included specific workspace IDs, only _workspace-level_
* audit logs for those workspaces are delivered. If the log delivery configuration applies to the
* entire account (_account level_ delivery configuration), the audit log delivery includes
* workspace-level audit logs for all workspaces in the account as well as account-level audit logs.
- * See [Audit log delivery] for details. * Auditable events are typically available in logs within
- * 15 minutes.
+ * See Audit log delivery [AWS] or [GCP] for details. * Auditable events are typically available in
+ * logs within 15 minutes.
*
- * [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage
- * log delivery]:
+ *
[AWS]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+ * [Billable usage log delivery]:
* https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * [GCP]: https://docs.databricks.com/gcp/en/admin/account-settings/audit-logs [Usage page]:
+ * https://docs.databricks.com/administration-guide/account-settings/usage.html [create a new AWS S3
+ * bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
*
*
This is the high-level interface, that contains generated methods.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
index 9cd70ad33..f0bb25743 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Latest kind: EXTERNAL_LOCATION_ONELAKE_MANAGED = 299; Next id: 300 */
+/** Latest kind: CONNECTION_OUTLOOK_OAUTH_M2M = 300; Next id: 301 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/EvaluationStatusType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/EvaluationStatusType.java
new file mode 100755
index 000000000..5fc3ce100
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/EvaluationStatusType.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum EvaluationStatusType {
+ DONE,
+ EVALUATION_CANCELLED,
+ EVALUATION_FAILED,
+ EVALUATION_TIMEOUT,
+ NOT_STARTED,
+ RUNNING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
index 9d40d74ae..977c2abf4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
@@ -184,6 +184,55 @@ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryRes
return impl.generateDownloadFullQueryResult(request);
}
+ /** Create and run evaluations for multiple benchmark questions in a Genie space. */
+ public GenieEvalRunResponse genieCreateEvalRun(GenieCreateEvalRunRequest request) {
+ return impl.genieCreateEvalRun(request);
+ }
+
+ public GenieEvalResultDetails genieGetEvalResultDetails(
+ String spaceId, String evalRunId, String resultId) {
+ return genieGetEvalResultDetails(
+ new GenieGetEvalResultDetailsRequest()
+ .setSpaceId(spaceId)
+ .setEvalRunId(evalRunId)
+ .setResultId(resultId));
+ }
+
+ /** Get details for evaluation results. */
+ public GenieEvalResultDetails genieGetEvalResultDetails(
+ GenieGetEvalResultDetailsRequest request) {
+ return impl.genieGetEvalResultDetails(request);
+ }
+
+ public GenieEvalRunResponse genieGetEvalRun(String spaceId, String evalRunId) {
+ return genieGetEvalRun(
+ new GenieGetEvalRunRequest().setSpaceId(spaceId).setEvalRunId(evalRunId));
+ }
+
+ /** Get evaluation run details. */
+ public GenieEvalRunResponse genieGetEvalRun(GenieGetEvalRunRequest request) {
+ return impl.genieGetEvalRun(request);
+ }
+
+ public GenieListEvalResultsResponse genieListEvalResults(String spaceId, String evalRunId) {
+ return genieListEvalResults(
+ new GenieListEvalResultsRequest().setSpaceId(spaceId).setEvalRunId(evalRunId));
+ }
+
+ /** List evaluation results for a specific evaluation run. */
+ public GenieListEvalResultsResponse genieListEvalResults(GenieListEvalResultsRequest request) {
+ return impl.genieListEvalResults(request);
+ }
+
+ public GenieListEvalRunsResponse genieListEvalRuns(String spaceId) {
+ return genieListEvalRuns(new GenieListEvalRunsRequest().setSpaceId(spaceId));
+ }
+
+ /** Lists all evaluation runs in a space. */
+ public GenieListEvalRunsResponse genieListEvalRuns(GenieListEvalRunsRequest request) {
+ return impl.genieListEvalRuns(request);
+ }
+
public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
String spaceId,
String conversationId,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateEvalRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateEvalRunRequest.java
new file mode 100755
index 000000000..1de6cdd68
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateEvalRunRequest.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GenieCreateEvalRunRequest {
+ /**
+ * List of benchmark question IDs to evaluate. These questions must exist in the specified Genie
+ * space. If none are specified, then all benchmark questions are evaluated.
+ */
+ @JsonProperty("benchmark_question_ids")
+ private Collection benchmarkQuestionIds;
+
+ /** The ID associated with the Genie space where the evaluations will be executed. */
+ @JsonIgnore private String spaceId;
+
+ public GenieCreateEvalRunRequest setBenchmarkQuestionIds(
+ Collection benchmarkQuestionIds) {
+ this.benchmarkQuestionIds = benchmarkQuestionIds;
+ return this;
+ }
+
+ public Collection getBenchmarkQuestionIds() {
+ return benchmarkQuestionIds;
+ }
+
+ public GenieCreateEvalRunRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieCreateEvalRunRequest that = (GenieCreateEvalRunRequest) o;
+ return Objects.equals(benchmarkQuestionIds, that.benchmarkQuestionIds)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(benchmarkQuestionIds, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieCreateEvalRunRequest.class)
+ .add("benchmarkQuestionIds", benchmarkQuestionIds)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalAssessment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalAssessment.java
new file mode 100755
index 000000000..59b29f6cf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalAssessment.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum GenieEvalAssessment {
+ BAD,
+ GOOD,
+ NEEDS_REVIEW,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResponse.java
new file mode 100755
index 000000000..f76f3bc24
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResponse.java
@@ -0,0 +1,75 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieEvalResponse {
+ /** The response content (either text or SQL query). */
+ @JsonProperty("response")
+ private String response;
+
+ /** Type of response */
+ @JsonProperty("response_type")
+ private GenieEvalResponseType responseType;
+
+ /** SQL Statement Execution response. */
+ @JsonProperty("sql_execution_result")
+ private com.databricks.sdk.service.sql.StatementResponse sqlExecutionResult;
+
+ public GenieEvalResponse setResponse(String response) {
+ this.response = response;
+ return this;
+ }
+
+ public String getResponse() {
+ return response;
+ }
+
+ public GenieEvalResponse setResponseType(GenieEvalResponseType responseType) {
+ this.responseType = responseType;
+ return this;
+ }
+
+ public GenieEvalResponseType getResponseType() {
+ return responseType;
+ }
+
+ public GenieEvalResponse setSqlExecutionResult(
+ com.databricks.sdk.service.sql.StatementResponse sqlExecutionResult) {
+ this.sqlExecutionResult = sqlExecutionResult;
+ return this;
+ }
+
+ public com.databricks.sdk.service.sql.StatementResponse getSqlExecutionResult() {
+ return sqlExecutionResult;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieEvalResponse that = (GenieEvalResponse) o;
+ return Objects.equals(response, that.response)
+ && Objects.equals(responseType, that.responseType)
+ && Objects.equals(sqlExecutionResult, that.sqlExecutionResult);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(response, responseType, sqlExecutionResult);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieEvalResponse.class)
+ .add("response", response)
+ .add("responseType", responseType)
+ .add("sqlExecutionResult", sqlExecutionResult)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResponseType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResponseType.java
new file mode 100755
index 000000000..9cca9a84f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResponseType.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum GenieEvalResponseType {
+ SQL,
+ TEXT,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResult.java
new file mode 100755
index 000000000..c192150e6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResult.java
@@ -0,0 +1,140 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Shows summary information for an evaluation result. For detailed information including SQL
+ * execution results, actual/expected responses, and assessment scores, use
+ * GenieGetEvalResultDetails.
+ */
+@Generated
+public class GenieEvalResult {
+ /** Stored snapshot of original benchmark answer text. */
+ @JsonProperty("benchmark_answer")
+ private String benchmarkAnswer;
+
+ /** The ID of the benchmark question that was evaluated. */
+ @JsonProperty("benchmark_question_id")
+ private String benchmarkQuestionId;
+
+ /** User ID who created evaluation result. */
+ @JsonProperty("created_by_user")
+ private Long createdByUser;
+
+ /** Stored snapshot of original benchmark question text. */
+ @JsonProperty("question")
+ private String question;
+
+ /** Unique identifier for this evaluation result. */
+ @JsonProperty("result_id")
+ private String resultId;
+
+ /** The ID of the space the evaluation result belongs to. */
+ @JsonProperty("space_id")
+ private String spaceId;
+
+ /** Current status of this evaluation result. */
+ @JsonProperty("status")
+ private EvaluationStatusType status;
+
+ public GenieEvalResult setBenchmarkAnswer(String benchmarkAnswer) {
+ this.benchmarkAnswer = benchmarkAnswer;
+ return this;
+ }
+
+ public String getBenchmarkAnswer() {
+ return benchmarkAnswer;
+ }
+
+ public GenieEvalResult setBenchmarkQuestionId(String benchmarkQuestionId) {
+ this.benchmarkQuestionId = benchmarkQuestionId;
+ return this;
+ }
+
+ public String getBenchmarkQuestionId() {
+ return benchmarkQuestionId;
+ }
+
+ public GenieEvalResult setCreatedByUser(Long createdByUser) {
+ this.createdByUser = createdByUser;
+ return this;
+ }
+
+ public Long getCreatedByUser() {
+ return createdByUser;
+ }
+
+ public GenieEvalResult setQuestion(String question) {
+ this.question = question;
+ return this;
+ }
+
+ public String getQuestion() {
+ return question;
+ }
+
+ public GenieEvalResult setResultId(String resultId) {
+ this.resultId = resultId;
+ return this;
+ }
+
+ public String getResultId() {
+ return resultId;
+ }
+
+ public GenieEvalResult setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ public GenieEvalResult setStatus(EvaluationStatusType status) {
+ this.status = status;
+ return this;
+ }
+
+ public EvaluationStatusType getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieEvalResult that = (GenieEvalResult) o;
+ return Objects.equals(benchmarkAnswer, that.benchmarkAnswer)
+ && Objects.equals(benchmarkQuestionId, that.benchmarkQuestionId)
+ && Objects.equals(createdByUser, that.createdByUser)
+ && Objects.equals(question, that.question)
+ && Objects.equals(resultId, that.resultId)
+ && Objects.equals(spaceId, that.spaceId)
+ && Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ benchmarkAnswer, benchmarkQuestionId, createdByUser, question, resultId, spaceId, status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieEvalResult.class)
+ .add("benchmarkAnswer", benchmarkAnswer)
+ .add("benchmarkQuestionId", benchmarkQuestionId)
+ .add("createdByUser", createdByUser)
+ .add("question", question)
+ .add("resultId", resultId)
+ .add("spaceId", spaceId)
+ .add("status", status)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResultDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResultDetails.java
new file mode 100755
index 000000000..b8a65fa18
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResultDetails.java
@@ -0,0 +1,176 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Shows detailed information for an evaluation result. */
+@Generated
+public class GenieEvalResultDetails {
+ /** The actual response generated by Genie. */
+ @JsonProperty("actual_response")
+ private Collection actualResponse;
+
+ /** Assessment of the evaluation result: good, bad, or needs review */
+ @JsonProperty("assessment")
+ private GenieEvalAssessment assessment;
+
+ /** Reasons for the assessment score. */
+ @JsonProperty("assessment_reasons")
+ private Collection assessmentReasons;
+
+ /** The ID of the benchmark question that was evaluated. */
+ @JsonProperty("benchmark_question_id")
+ private String benchmarkQuestionId;
+
+ /** Current status of the evaluation run. */
+ @JsonProperty("eval_run_status")
+ private EvaluationStatusType evalRunStatus;
+
+ /** The expected responses from the benchmark. */
+ @JsonProperty("expected_response")
+ private Collection expectedResponse;
+
+ /** Whether this evaluation was manually assessed. */
+ @JsonProperty("manual_assessment")
+ private Boolean manualAssessment;
+
+ /** The unique identifier for the evaluation result. */
+ @JsonProperty("result_id")
+ private String resultId;
+
+ /** The ID of the space the evaluation result belongs to. */
+ @JsonProperty("space_id")
+ private String spaceId;
+
+ public GenieEvalResultDetails setActualResponse(Collection actualResponse) {
+ this.actualResponse = actualResponse;
+ return this;
+ }
+
+ public Collection getActualResponse() {
+ return actualResponse;
+ }
+
+ public GenieEvalResultDetails setAssessment(GenieEvalAssessment assessment) {
+ this.assessment = assessment;
+ return this;
+ }
+
+ public GenieEvalAssessment getAssessment() {
+ return assessment;
+ }
+
+ public GenieEvalResultDetails setAssessmentReasons(Collection assessmentReasons) {
+ this.assessmentReasons = assessmentReasons;
+ return this;
+ }
+
+ public Collection getAssessmentReasons() {
+ return assessmentReasons;
+ }
+
+ public GenieEvalResultDetails setBenchmarkQuestionId(String benchmarkQuestionId) {
+ this.benchmarkQuestionId = benchmarkQuestionId;
+ return this;
+ }
+
+ public String getBenchmarkQuestionId() {
+ return benchmarkQuestionId;
+ }
+
+ public GenieEvalResultDetails setEvalRunStatus(EvaluationStatusType evalRunStatus) {
+ this.evalRunStatus = evalRunStatus;
+ return this;
+ }
+
+ public EvaluationStatusType getEvalRunStatus() {
+ return evalRunStatus;
+ }
+
+ public GenieEvalResultDetails setExpectedResponse(
+ Collection expectedResponse) {
+ this.expectedResponse = expectedResponse;
+ return this;
+ }
+
+ public Collection getExpectedResponse() {
+ return expectedResponse;
+ }
+
+ public GenieEvalResultDetails setManualAssessment(Boolean manualAssessment) {
+ this.manualAssessment = manualAssessment;
+ return this;
+ }
+
+ public Boolean getManualAssessment() {
+ return manualAssessment;
+ }
+
+ public GenieEvalResultDetails setResultId(String resultId) {
+ this.resultId = resultId;
+ return this;
+ }
+
+ public String getResultId() {
+ return resultId;
+ }
+
+ public GenieEvalResultDetails setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieEvalResultDetails that = (GenieEvalResultDetails) o;
+ return Objects.equals(actualResponse, that.actualResponse)
+ && Objects.equals(assessment, that.assessment)
+ && Objects.equals(assessmentReasons, that.assessmentReasons)
+ && Objects.equals(benchmarkQuestionId, that.benchmarkQuestionId)
+ && Objects.equals(evalRunStatus, that.evalRunStatus)
+ && Objects.equals(expectedResponse, that.expectedResponse)
+ && Objects.equals(manualAssessment, that.manualAssessment)
+ && Objects.equals(resultId, that.resultId)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ actualResponse,
+ assessment,
+ assessmentReasons,
+ benchmarkQuestionId,
+ evalRunStatus,
+ expectedResponse,
+ manualAssessment,
+ resultId,
+ spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieEvalResultDetails.class)
+ .add("actualResponse", actualResponse)
+ .add("assessment", assessment)
+ .add("assessmentReasons", assessmentReasons)
+ .add("benchmarkQuestionId", benchmarkQuestionId)
+ .add("evalRunStatus", evalRunStatus)
+ .add("expectedResponse", expectedResponse)
+ .add("manualAssessment", manualAssessment)
+ .add("resultId", resultId)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalRunResponse.java
new file mode 100755
index 000000000..2e51186cd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalRunResponse.java
@@ -0,0 +1,173 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieEvalRunResponse {
+ /** Timestamp when the evaluation run was created (milliseconds since epoch). */
+ @JsonProperty("created_timestamp")
+ private Long createdTimestamp;
+
+ /** The unique identifier for the evaluation run. */
+ @JsonProperty("eval_run_id")
+ private String evalRunId;
+
+ /** Current status of the evaluation run. */
+ @JsonProperty("eval_run_status")
+ private EvaluationStatusType evalRunStatus;
+
+ /** Timestamp when the evaluation run was last updated (milliseconds since epoch). */
+ @JsonProperty("last_updated_timestamp")
+ private Long lastUpdatedTimestamp;
+
+ /** Number of questions answered correctly. */
+ @JsonProperty("num_correct")
+ private Long numCorrect;
+
+ /** Number of questions that have been completed. */
+ @JsonProperty("num_done")
+ private Long numDone;
+
+ /** Number of questions that need manual review. */
+ @JsonProperty("num_needs_review")
+ private Long numNeedsReview;
+
+ /** Total number of questions in the evaluation run. */
+ @JsonProperty("num_questions")
+ private Long numQuestions;
+
+ /** User ID who initiated the evaluation run. */
+ @JsonProperty("run_by_user")
+ private Long runByUser;
+
+ public GenieEvalRunResponse setCreatedTimestamp(Long createdTimestamp) {
+ this.createdTimestamp = createdTimestamp;
+ return this;
+ }
+
+ public Long getCreatedTimestamp() {
+ return createdTimestamp;
+ }
+
+ public GenieEvalRunResponse setEvalRunId(String evalRunId) {
+ this.evalRunId = evalRunId;
+ return this;
+ }
+
+ public String getEvalRunId() {
+ return evalRunId;
+ }
+
+ public GenieEvalRunResponse setEvalRunStatus(EvaluationStatusType evalRunStatus) {
+ this.evalRunStatus = evalRunStatus;
+ return this;
+ }
+
+ public EvaluationStatusType getEvalRunStatus() {
+ return evalRunStatus;
+ }
+
+ public GenieEvalRunResponse setLastUpdatedTimestamp(Long lastUpdatedTimestamp) {
+ this.lastUpdatedTimestamp = lastUpdatedTimestamp;
+ return this;
+ }
+
+ public Long getLastUpdatedTimestamp() {
+ return lastUpdatedTimestamp;
+ }
+
+ public GenieEvalRunResponse setNumCorrect(Long numCorrect) {
+ this.numCorrect = numCorrect;
+ return this;
+ }
+
+ public Long getNumCorrect() {
+ return numCorrect;
+ }
+
+ public GenieEvalRunResponse setNumDone(Long numDone) {
+ this.numDone = numDone;
+ return this;
+ }
+
+ public Long getNumDone() {
+ return numDone;
+ }
+
+ public GenieEvalRunResponse setNumNeedsReview(Long numNeedsReview) {
+ this.numNeedsReview = numNeedsReview;
+ return this;
+ }
+
+ public Long getNumNeedsReview() {
+ return numNeedsReview;
+ }
+
+ public GenieEvalRunResponse setNumQuestions(Long numQuestions) {
+ this.numQuestions = numQuestions;
+ return this;
+ }
+
+ public Long getNumQuestions() {
+ return numQuestions;
+ }
+
+ public GenieEvalRunResponse setRunByUser(Long runByUser) {
+ this.runByUser = runByUser;
+ return this;
+ }
+
+ public Long getRunByUser() {
+ return runByUser;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieEvalRunResponse that = (GenieEvalRunResponse) o;
+ return Objects.equals(createdTimestamp, that.createdTimestamp)
+ && Objects.equals(evalRunId, that.evalRunId)
+ && Objects.equals(evalRunStatus, that.evalRunStatus)
+ && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp)
+ && Objects.equals(numCorrect, that.numCorrect)
+ && Objects.equals(numDone, that.numDone)
+ && Objects.equals(numNeedsReview, that.numNeedsReview)
+ && Objects.equals(numQuestions, that.numQuestions)
+ && Objects.equals(runByUser, that.runByUser);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createdTimestamp,
+ evalRunId,
+ evalRunStatus,
+ lastUpdatedTimestamp,
+ numCorrect,
+ numDone,
+ numNeedsReview,
+ numQuestions,
+ runByUser);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieEvalRunResponse.class)
+ .add("createdTimestamp", createdTimestamp)
+ .add("evalRunId", evalRunId)
+ .add("evalRunStatus", evalRunStatus)
+ .add("lastUpdatedTimestamp", lastUpdatedTimestamp)
+ .add("numCorrect", numCorrect)
+ .add("numDone", numDone)
+ .add("numNeedsReview", numNeedsReview)
+ .add("numQuestions", numQuestions)
+ .add("runByUser", runByUser)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetEvalResultDetailsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetEvalResultDetailsRequest.java
new file mode 100755
index 000000000..24dd4e25d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetEvalResultDetailsRequest.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieGetEvalResultDetailsRequest { // all fields are URL path parameters (hence @JsonIgnore); see GenieImpl.genieGetEvalResultDetails
+ /** The unique identifier for the evaluation run. */
+ @JsonIgnore private String evalRunId;
+
+ /** The unique identifier for the evaluation result. */
+ @JsonIgnore private String resultId;
+
+ /** The ID associated with the Genie space where the evaluation run is located. */
+ @JsonIgnore private String spaceId;
+
+ public GenieGetEvalResultDetailsRequest setEvalRunId(String evalRunId) {
+ this.evalRunId = evalRunId;
+ return this;
+ }
+
+ public String getEvalRunId() {
+ return evalRunId;
+ }
+
+ public GenieGetEvalResultDetailsRequest setResultId(String resultId) {
+ this.resultId = resultId;
+ return this;
+ }
+
+ public String getResultId() {
+ return resultId;
+ }
+
+ public GenieGetEvalResultDetailsRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGetEvalResultDetailsRequest that = (GenieGetEvalResultDetailsRequest) o;
+ return Objects.equals(evalRunId, that.evalRunId)
+ && Objects.equals(resultId, that.resultId)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(evalRunId, resultId, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGetEvalResultDetailsRequest.class)
+ .add("evalRunId", evalRunId)
+ .add("resultId", resultId)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetEvalRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetEvalRunRequest.java
new file mode 100755
index 000000000..945d5200e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetEvalRunRequest.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieGetEvalRunRequest {
+ /** The unique identifier for the evaluation run. */
+ @JsonIgnore private String evalRunId;
+
+ /** The ID associated with the Genie space where the evaluation run is located. */
+ @JsonIgnore private String spaceId;
+
+ public GenieGetEvalRunRequest setEvalRunId(String evalRunId) {
+ this.evalRunId = evalRunId;
+ return this;
+ }
+
+ public String getEvalRunId() {
+ return evalRunId;
+ }
+
+ public GenieGetEvalRunRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGetEvalRunRequest that = (GenieGetEvalRunRequest) o;
+ return Objects.equals(evalRunId, that.evalRunId) && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(evalRunId, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGetEvalRunRequest.class)
+ .add("evalRunId", evalRunId)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
index 86023a2dd..f9200fa9e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
@@ -164,6 +164,101 @@ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryRes
}
}
+ @Override
+ public GenieEvalRunResponse genieCreateEvalRun(GenieCreateEvalRunRequest request) { // POST /api/2.0/genie/spaces/{space_id}/eval-runs
+ String path = String.format("/api/2.0/genie/spaces/%s/eval-runs", request.getSpaceId());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request)); // request object is JSON-serialized as the body
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) { // org header only when the client is workspace-scoped
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, GenieEvalRunResponse.class); // deserializes response JSON into GenieEvalRunResponse
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e); // wrap but preserve cause for callers
+ }
+ }
+
+ @Override
+ public GenieEvalResultDetails genieGetEvalResultDetails(
+ GenieGetEvalResultDetailsRequest request) { // GET /api/2.0/genie/spaces/{space_id}/eval-runs/{eval_run_id}/results/{result_id}
+ String path =
+ String.format(
+ "/api/2.0/genie/spaces/%s/eval-runs/%s/results/%s",
+ request.getSpaceId(), request.getEvalRunId(), request.getResultId());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) { // org header only when the client is workspace-scoped
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, GenieEvalResultDetails.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e); // wrap but preserve cause for callers
+ }
+ }
+
+ @Override
+ public GenieEvalRunResponse genieGetEvalRun(GenieGetEvalRunRequest request) { // GET /api/2.0/genie/spaces/{space_id}/eval-runs/{eval_run_id}
+ String path =
+ String.format(
+ "/api/2.0/genie/spaces/%s/eval-runs/%s", request.getSpaceId(), request.getEvalRunId());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) { // org header only when the client is workspace-scoped
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, GenieEvalRunResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e); // wrap but preserve cause for callers
+ }
+ }
+
+ @Override
+ public GenieListEvalResultsResponse genieListEvalResults(GenieListEvalResultsRequest request) { // GET .../eval-runs/{eval_run_id}/results (paginated via page_size/page_token query params)
+ String path =
+ String.format(
+ "/api/2.0/genie/spaces/%s/eval-runs/%s/results",
+ request.getSpaceId(), request.getEvalRunId());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) { // org header only when the client is workspace-scoped
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, GenieListEvalResultsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e); // wrap but preserve cause for callers
+ }
+ }
+
+ @Override
+ public GenieListEvalRunsResponse genieListEvalRuns(GenieListEvalRunsRequest request) { // GET /api/2.0/genie/spaces/{space_id}/eval-runs (paginated via page_size/page_token query params)
+ String path = String.format("/api/2.0/genie/spaces/%s/eval-runs", request.getSpaceId());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) { // org header only when the client is workspace-scoped
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, GenieListEvalRunsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e); // wrap but preserve cause for callers
+ }
+ }
+
@Override
public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
GenieGetDownloadFullQueryResultRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsRequest.java
new file mode 100755
index 000000000..30bc4a5ab
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsRequest.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieListEvalResultsRequest { // path params (spaceId, evalRunId) plus pagination query params
+ /** The unique identifier for the evaluation run. */
+ @JsonIgnore private String evalRunId;
+
+ /** Maximum number of eval results to return per page. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** Opaque token to retrieve the next page of results. */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** The ID associated with the Genie space where the evaluation run is located. */
+ @JsonIgnore private String spaceId;
+
+ public GenieListEvalResultsRequest setEvalRunId(String evalRunId) {
+ this.evalRunId = evalRunId;
+ return this;
+ }
+
+ public String getEvalRunId() {
+ return evalRunId;
+ }
+
+ public GenieListEvalResultsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public GenieListEvalResultsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public GenieListEvalResultsRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieListEvalResultsRequest that = (GenieListEvalResultsRequest) o;
+ return Objects.equals(evalRunId, that.evalRunId)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(evalRunId, pageSize, pageToken, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieListEvalResultsRequest.class)
+ .add("evalRunId", evalRunId)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsResponse.java
new file mode 100755
index 000000000..95208b47b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsResponse.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GenieListEvalResultsResponse {
+ /** List of evaluation results for the specified run. */
+ @JsonProperty("eval_results")
+ private Collection evalResults; // NOTE(review): raw Collection — element type is missing from the OpenAPI spec; confirm with the generator
+
+ /** The token to use for retrieving the next page of results. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public GenieListEvalResultsResponse setEvalResults(Collection evalResults) {
+ this.evalResults = evalResults;
+ return this;
+ }
+
+ public Collection getEvalResults() {
+ return evalResults;
+ }
+
+ public GenieListEvalResultsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieListEvalResultsResponse that = (GenieListEvalResultsResponse) o;
+ return Objects.equals(evalResults, that.evalResults)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(evalResults, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieListEvalResultsResponse.class)
+ .add("evalResults", evalResults)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsRequest.java
new file mode 100755
index 000000000..943ca06e6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieListEvalRunsRequest { // spaceId is a path param; pageSize/pageToken are query params
+ /** Maximum number of evaluation runs to return per page. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** Opaque token to retrieve the next page of results. */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** The ID associated with the Genie space where the evaluation run is located. */
+ @JsonIgnore private String spaceId;
+
+ public GenieListEvalRunsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public GenieListEvalRunsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public GenieListEvalRunsRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieListEvalRunsRequest that = (GenieListEvalRunsRequest) o;
+ return Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieListEvalRunsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsResponse.java
new file mode 100755
index 000000000..f9f97d20f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsResponse.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GenieListEvalRunsResponse {
+ /** List of evaluation runs for a space on provided page token and page size */
+ @JsonProperty("eval_runs")
+ private Collection evalRuns; // NOTE(review): raw Collection — element type is missing from the OpenAPI spec; confirm with the generator
+
+ /** The token to use for retrieving the next page of results. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public GenieListEvalRunsResponse setEvalRuns(Collection evalRuns) {
+ this.evalRuns = evalRuns;
+ return this;
+ }
+
+ public Collection getEvalRuns() {
+ return evalRuns;
+ }
+
+ public GenieListEvalRunsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieListEvalRunsResponse that = (GenieListEvalRunsResponse) o;
+ return Objects.equals(evalRuns, that.evalRuns)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(evalRuns, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieListEvalRunsResponse.class)
+ .add("evalRuns", evalRuns)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
index ccf915471..e6fd1a6a9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
@@ -72,6 +72,23 @@ GenieGetMessageQueryResultResponse executeMessageQuery(
GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
GenieGenerateDownloadFullQueryResultRequest genieGenerateDownloadFullQueryResultRequest);
+ /** Create and run evaluations for multiple benchmark questions in a Genie space. */
+ GenieEvalRunResponse genieCreateEvalRun(GenieCreateEvalRunRequest genieCreateEvalRunRequest);
+
+ /** Get the details of a single evaluation result within an evaluation run. */
+ GenieEvalResultDetails genieGetEvalResultDetails(
+ GenieGetEvalResultDetailsRequest genieGetEvalResultDetailsRequest);
+
+ /** Get a single evaluation run by its ID. */
+ GenieEvalRunResponse genieGetEvalRun(GenieGetEvalRunRequest genieGetEvalRunRequest);
+
+ /** List evaluation results for a specific evaluation run (paginated). */
+ GenieListEvalResultsResponse genieListEvalResults(
+ GenieListEvalResultsRequest genieListEvalResultsRequest);
+
+ /** List all evaluation runs in a space (paginated). */
+ GenieListEvalRunsResponse genieListEvalRuns(GenieListEvalRunsRequest genieListEvalRunsRequest);
+
/**
* After [Generating a Full Query Result Download](:method:genie/generatedownloadfullqueryresult)
* and successfully receiving a `download_id` and `download_id_signature`, use this API to poll
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ScoreReason.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ScoreReason.java
new file mode 100755
index 000000000..f7c793d4b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ScoreReason.java
@@ -0,0 +1,34 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum ScoreReason { // reason codes behind an eval score; LLM_JUDGE_* look judge-assigned, RESULT_*/COLUMN_*/SINGLE_CELL_* result-diff based — confirm with spec
+ COLUMN_TYPE_DIFFERENCE,
+ EMPTY_GOOD_SQL,
+ EMPTY_RESULT,
+ LLM_JUDGE_FORMATTING_ERROR,
+ LLM_JUDGE_INCOMPLETE_OR_PARTIAL_OUTPUT,
+ LLM_JUDGE_INCORRECT_FUNCTION_USAGE,
+ LLM_JUDGE_INCORRECT_METRIC_CALCULATION,
+ LLM_JUDGE_INCORRECT_TABLE_OR_FIELD_USAGE,
+ LLM_JUDGE_INSTRUCTION_COMPLIANCE_OR_MISSING_BUSINESS_LOGIC,
+ LLM_JUDGE_MISINTERPRETATION_OF_USER_REQUEST,
+ LLM_JUDGE_MISSING_JOIN,
+ LLM_JUDGE_MISSING_OR_INCORRECT_AGGREGATION,
+ LLM_JUDGE_MISSING_OR_INCORRECT_FILTER,
+ LLM_JUDGE_MISSING_OR_INCORRECT_JOIN,
+ LLM_JUDGE_OTHER,
+ LLM_JUDGE_SEMANTIC_ERROR,
+ LLM_JUDGE_SYNTAX_ERROR,
+ LLM_JUDGE_WRONG_AGGREGATION,
+ LLM_JUDGE_WRONG_COLUMNS,
+ LLM_JUDGE_WRONG_FILTER,
+ RESULT_EXTRA_COLUMNS,
+ RESULT_EXTRA_ROWS,
+ RESULT_MISSING_COLUMNS,
+ RESULT_MISSING_ROWS,
+ SINGLE_CELL_DIFFERENCE,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
index c9247a81e..b3213ea76 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Next field marker: 13 */
@Generated
public class DatabaseTable {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
index 8ea0b5343..5e85bb53a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Next field marker: 18 */
@Generated
public class SyncedDatabaseTable {
/** Synced Table data synchronization status */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfig.java
new file mode 100755
index 000000000..f6f795f4d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfig.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Auto-tagging configuration for a classification tag. When enabled, detected columns are
+ * automatically tagged with Unity Catalog tags.
+ */
+@Generated
+public class AutoTaggingConfig {
+ /** Whether auto-tagging is enabled or disabled for this classification tag. */
+ @JsonProperty("auto_tagging_mode")
+ private AutoTaggingConfigAutoTaggingMode autoTaggingMode; // AUTO_TAGGING_ENABLED or AUTO_TAGGING_DISABLED
+
+ /** The Classification Tag (e.g., "class.name", "class.location"). */
+ @JsonProperty("classification_tag")
+ private String classificationTag;
+
+ public AutoTaggingConfig setAutoTaggingMode(AutoTaggingConfigAutoTaggingMode autoTaggingMode) {
+ this.autoTaggingMode = autoTaggingMode;
+ return this;
+ }
+
+ public AutoTaggingConfigAutoTaggingMode getAutoTaggingMode() {
+ return autoTaggingMode;
+ }
+
+ public AutoTaggingConfig setClassificationTag(String classificationTag) {
+ this.classificationTag = classificationTag;
+ return this;
+ }
+
+ public String getClassificationTag() {
+ return classificationTag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AutoTaggingConfig that = (AutoTaggingConfig) o;
+ return Objects.equals(autoTaggingMode, that.autoTaggingMode)
+ && Objects.equals(classificationTag, that.classificationTag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(autoTaggingMode, classificationTag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AutoTaggingConfig.class)
+ .add("autoTaggingMode", autoTaggingMode)
+ .add("classificationTag", classificationTag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfigAutoTaggingMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfigAutoTaggingMode.java
new file mode 100755
index 000000000..e20ba94fd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/AutoTaggingConfigAutoTaggingMode.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+
+/** Auto-tagging mode: whether detected columns are automatically tagged with Unity Catalog tags. */
+@Generated
+public enum AutoTaggingConfigAutoTaggingMode {
+ AUTO_TAGGING_DISABLED,
+ AUTO_TAGGING_ENABLED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfig.java
new file mode 100755
index 000000000..d36081ec8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfig.java
@@ -0,0 +1,87 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * Data Classification configuration for a Unity Catalog catalog. This message follows the "At Most
+ * One Resource" pattern: at most one CatalogConfig exists per catalog. - Full CRUD operations are
+ * supported: Create enables Data Classification, Delete disables it - It has no unique identifier
+ * of its own and uses its parent catalog's identifier (catalog_name)
+ */
+@Generated
+public class CatalogConfig {
+ /**
+ * List of auto-tagging configurations for this catalog. Empty list means no auto-tagging is
+ * enabled.
+ */
+ @JsonProperty("auto_tag_configs")
+ private Collection<AutoTaggingConfig> autoTagConfigs; // NOTE(review): typed from raw Collection per field doc; confirm element type against spec
+
+ /**
+ * Schemas to include in the scan. Empty list is not supported as it results in a no-op scan. If
+ * `included_schemas` is not set, all schemas are scanned.
+ */
+ @JsonProperty("included_schemas")
+ private CatalogConfigSchemaNames includedSchemas;
+
+ /** Resource name in the format: catalogs/{catalog_name}/config. */
+ @JsonProperty("name")
+ private String name;
+
+ public CatalogConfig setAutoTagConfigs(Collection<AutoTaggingConfig> autoTagConfigs) {
+ this.autoTagConfigs = autoTagConfigs;
+ return this;
+ }
+
+ public Collection<AutoTaggingConfig> getAutoTagConfigs() {
+ return autoTagConfigs;
+ }
+
+ public CatalogConfig setIncludedSchemas(CatalogConfigSchemaNames includedSchemas) {
+ this.includedSchemas = includedSchemas;
+ return this;
+ }
+
+ public CatalogConfigSchemaNames getIncludedSchemas() {
+ return includedSchemas;
+ }
+
+ public CatalogConfig setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CatalogConfig that = (CatalogConfig) o;
+ return Objects.equals(autoTagConfigs, that.autoTagConfigs)
+ && Objects.equals(includedSchemas, that.includedSchemas)
+ && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(autoTagConfigs, includedSchemas, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CatalogConfig.class)
+ .add("autoTagConfigs", autoTagConfigs)
+ .add("includedSchemas", includedSchemas)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfigSchemaNames.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfigSchemaNames.java
new file mode 100755
index 000000000..a2901aada
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CatalogConfigSchemaNames.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Wrapper message for a list of schema names. */
+@Generated
+public class CatalogConfigSchemaNames {
+ /** The schema names. */
+ @JsonProperty("names")
+ private Collection<String> names; // NOTE(review): typed from raw Collection per class doc ("list of schema names"); confirm against spec
+
+ public CatalogConfigSchemaNames setNames(Collection<String> names) {
+ this.names = names;
+ return this;
+ }
+
+ public Collection<String> getNames() {
+ return names;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CatalogConfigSchemaNames that = (CatalogConfigSchemaNames) o;
+ return Objects.equals(names, that.names);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(names);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CatalogConfigSchemaNames.class).add("names", names).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CreateCatalogConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CreateCatalogConfigRequest.java
new file mode 100755
index 000000000..b1dc67f4b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/CreateCatalogConfigRequest.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateCatalogConfigRequest {
+ /** The configuration to create. */
+ @JsonProperty("catalog_config")
+ private CatalogConfig catalogConfig;
+
+ /** Parent resource in the format: catalogs/{catalog_name}. */
+ @JsonIgnore private String parent; // path parameter, not serialized in the request body
+
+ public CreateCatalogConfigRequest setCatalogConfig(CatalogConfig catalogConfig) {
+ this.catalogConfig = catalogConfig;
+ return this;
+ }
+
+ public CatalogConfig getCatalogConfig() {
+ return catalogConfig;
+ }
+
+ public CreateCatalogConfigRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateCatalogConfigRequest that = (CreateCatalogConfigRequest) o;
+ return Objects.equals(catalogConfig, that.catalogConfig) && Objects.equals(parent, that.parent);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalogConfig, parent);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateCatalogConfigRequest.class)
+ .add("catalogConfig", catalogConfig)
+ .add("parent", parent)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java
new file mode 100755
index 000000000..2f1999110
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationAPI.java
@@ -0,0 +1,70 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dataclassification;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Manage data classification for Unity Catalog catalogs. Data classification automatically
+ * identifies and tags sensitive data (PII) in Unity Catalog tables. Each catalog can have at most
+ * one configuration resource that controls scanning behavior and auto-tagging rules.
+ */
+@Generated
+public class DataClassificationAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DataClassificationAPI.class);
+
+ private final DataClassificationService impl;
+
+ /** Regular-use constructor */
+ public DataClassificationAPI(ApiClient apiClient) {
+ impl = new DataClassificationImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public DataClassificationAPI(DataClassificationService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Create Data Classification configuration for a catalog.
+ *
+ * Creates a new config resource, which enables Data Classification for the specified catalog.
+ * - The config must not already exist for the catalog.
+ */
+ public CatalogConfig createCatalogConfig(CreateCatalogConfigRequest request) {
+ return impl.createCatalogConfig(request);
+ }
+
+ public void deleteCatalogConfig(String name) {
+ deleteCatalogConfig(new DeleteCatalogConfigRequest().setName(name));
+ }
+
+ /** Delete Data Classification configuration for a catalog. */
+ public void deleteCatalogConfig(DeleteCatalogConfigRequest request) {
+ impl.deleteCatalogConfig(request);
+ }
+
+ public CatalogConfig getCatalogConfig(String name) {
+ return getCatalogConfig(new GetCatalogConfigRequest().setName(name));
+ }
+
+ /** Get the Data Classification configuration for a catalog. */
+ public CatalogConfig getCatalogConfig(GetCatalogConfigRequest request) {
+ return impl.getCatalogConfig(request);
+ }
+
+ /**
+ * Update the Data Classification configuration for a catalog. - The config must already exist for
+ * the catalog. - Updates fields specified in the update_mask. Use update_mask field to perform
+ * partial updates of the configuration.
+ */
+ public CatalogConfig updateCatalogConfig(UpdateCatalogConfigRequest request) {
+ return impl.updateCatalogConfig(request);
+ }
+
+ public DataClassificationService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationImpl.java
new file mode 100755
index 000000000..7de258392
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationImpl.java
@@ -0,0 +1,88 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.dataclassification;

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.support.Generated;
import java.io.IOException;

/** Package-local implementation of DataClassification */
@Generated
class DataClassificationImpl implements DataClassificationService {
  private final ApiClient apiClient;

  public DataClassificationImpl(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  @Override
  public CatalogConfig createCatalogConfig(CreateCatalogConfigRequest request) {
    // POST /api/data-classification/v1/{parent}/config, where parent is
    // "catalogs/{catalog_name}". The wire body is the nested CatalogConfig
    // itself, not the wrapper request object.
    String path = String.format("/api/data-classification/v1/%s/config", request.getParent());
    try {
      Request req = new Request("POST", path, apiClient.serialize(request.getCatalogConfig()));

      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      req.withHeader("Content-Type", "application/json");
      // Only sent when the client is bound to a workspace; presumably routes the
      // call to that workspace — behavior defined by the backend, not visible here.
      if (apiClient.workspaceId() != null) {
        req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
      }
      return apiClient.execute(req, CatalogConfig.class);
    } catch (IOException e) {
      // Serialization/transport failures surface as the SDK's unchecked exception,
      // preserving the original cause.
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }

  @Override
  public void deleteCatalogConfig(DeleteCatalogConfigRequest request) {
    // DELETE /api/data-classification/v1/{name}, where name is
    // "catalogs/{catalog_name}/config". No request body; response is discarded.
    String path = String.format("/api/data-classification/v1/%s", request.getName());
    try {
      Request req = new Request("DELETE", path);

      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      if (apiClient.workspaceId() != null) {
        req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
      }
      apiClient.execute(req, Void.class);
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }

  @Override
  public CatalogConfig getCatalogConfig(GetCatalogConfigRequest request) {
    // GET /api/data-classification/v1/{name}; deserializes the JSON response
    // into a CatalogConfig.
    String path = String.format("/api/data-classification/v1/%s", request.getName());
    try {
      Request req = new Request("GET", path);

      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      if (apiClient.workspaceId() != null) {
        req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
      }
      return apiClient.execute(req, CatalogConfig.class);
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }

  @Override
  public CatalogConfig updateCatalogConfig(UpdateCatalogConfigRequest request) {
    // PATCH /api/data-classification/v1/{name}; the body is the nested
    // CatalogConfig, while update_mask travels as a query parameter (set via
    // setQuery from the request's @QueryParam field).
    String path = String.format("/api/data-classification/v1/%s", request.getName());
    try {
      Request req = new Request("PATCH", path, apiClient.serialize(request.getCatalogConfig()));

      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      req.withHeader("Content-Type", "application/json");
      if (apiClient.workspaceId() != null) {
        req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
      }
      return apiClient.execute(req, CatalogConfig.class);
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationService.java
new file mode 100755
index 000000000..3b8773790
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DataClassificationService.java
@@ -0,0 +1,37 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.dataclassification;

import com.databricks.sdk.support.Generated;

/**
 * Manage data classification for Unity Catalog catalogs. Data classification automatically
 * identifies and tags sensitive data (PII) in Unity Catalog tables. Each catalog can have at most
 * one configuration resource that controls scanning behavior and auto-tagging rules.
 *
 * <p>This is the high-level interface, that contains generated methods.
 *
 * <p>Evolving: this interface is under development. Method signatures may change.
 */
@Generated
public interface DataClassificationService {
  /**
   * Create Data Classification configuration for a catalog.
   *
   * <p>Creates a new config resource, which enables Data Classification for the specified catalog.
   * - The config must not already exist for the catalog.
   */
  CatalogConfig createCatalogConfig(CreateCatalogConfigRequest createCatalogConfigRequest);

  /** Delete Data Classification configuration for a catalog. */
  void deleteCatalogConfig(DeleteCatalogConfigRequest deleteCatalogConfigRequest);

  /** Get the Data Classification configuration for a catalog. */
  CatalogConfig getCatalogConfig(GetCatalogConfigRequest getCatalogConfigRequest);

  /**
   * Update the Data Classification configuration for a catalog. - The config must already exist for
   * the catalog. - Updates fields specified in the update_mask. Use update_mask field to perform
   * partial updates of the configuration.
   */
  CatalogConfig updateCatalogConfig(UpdateCatalogConfigRequest updateCatalogConfigRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DeleteCatalogConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DeleteCatalogConfigRequest.java
new file mode 100755
index 000000000..944b05988
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/DeleteCatalogConfigRequest.java
@@ -0,0 +1,41 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.dataclassification;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Request to delete a catalog's Data Classification configuration. */
@Generated
public class DeleteCatalogConfigRequest {
  /** Resource name in the format: catalogs/{catalog_name}/config */
  @JsonIgnore private String name;

  /** Sets the resource name; returns {@code this} for fluent chaining. */
  public DeleteCatalogConfigRequest setName(String name) {
    this.name = name;
    return this;
  }

  /** Returns the resource name. */
  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    DeleteCatalogConfigRequest other = (DeleteCatalogConfigRequest) o;
    return Objects.equals(name, other.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name);
  }

  @Override
  public String toString() {
    return new ToStringer(DeleteCatalogConfigRequest.class).add("name", name).toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/GetCatalogConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/GetCatalogConfigRequest.java
new file mode 100755
index 000000000..f227185cd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/GetCatalogConfigRequest.java
@@ -0,0 +1,41 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.dataclassification;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Request to fetch a catalog's Data Classification configuration. */
@Generated
public class GetCatalogConfigRequest {
  /** Resource name in the format: catalogs/{catalog_name}/config */
  @JsonIgnore private String name;

  /** Sets the resource name; returns {@code this} for fluent chaining. */
  public GetCatalogConfigRequest setName(String name) {
    this.name = name;
    return this;
  }

  /** Returns the resource name. */
  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GetCatalogConfigRequest other = (GetCatalogConfigRequest) o;
    return Objects.equals(name, other.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name);
  }

  @Override
  public String toString() {
    return new ToStringer(GetCatalogConfigRequest.class).add("name", name).toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/UpdateCatalogConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/UpdateCatalogConfigRequest.java
new file mode 100755
index 000000000..943125a92
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataclassification/UpdateCatalogConfigRequest.java
@@ -0,0 +1,80 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.dataclassification;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.protobuf.FieldMask;
import java.util.Objects;

/** Request to partially update a catalog's Data Classification configuration. */
@Generated
public class UpdateCatalogConfigRequest {
  /**
   * The configuration to apply to the catalog. The name field in catalog_config identifies which
   * resource to update.
   */
  @JsonProperty("catalog_config")
  private CatalogConfig catalogConfig;

  /** Resource name in the format: catalogs/{catalog_name}/config. */
  @JsonIgnore private String name;

  /** Field mask specifying which fields to update. */
  @JsonIgnore
  @QueryParam("update_mask")
  private FieldMask updateMask;

  /** Sets the configuration to apply; returns {@code this} for fluent chaining. */
  public UpdateCatalogConfigRequest setCatalogConfig(CatalogConfig catalogConfig) {
    this.catalogConfig = catalogConfig;
    return this;
  }

  /** Returns the configuration to apply. */
  public CatalogConfig getCatalogConfig() {
    return catalogConfig;
  }

  /** Sets the resource name; returns {@code this} for fluent chaining. */
  public UpdateCatalogConfigRequest setName(String name) {
    this.name = name;
    return this;
  }

  /** Returns the resource name. */
  public String getName() {
    return name;
  }

  /** Sets the update field mask; returns {@code this} for fluent chaining. */
  public UpdateCatalogConfigRequest setUpdateMask(FieldMask updateMask) {
    this.updateMask = updateMask;
    return this;
  }

  /** Returns the update field mask. */
  public FieldMask getUpdateMask() {
    return updateMask;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateCatalogConfigRequest other = (UpdateCatalogConfigRequest) o;
    return Objects.equals(catalogConfig, other.catalogConfig)
        && Objects.equals(name, other.name)
        && Objects.equals(updateMask, other.updateMask);
  }

  @Override
  public int hashCode() {
    return Objects.hash(catalogConfig, name, updateMask);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateCatalogConfigRequest.class)
        .add("catalogConfig", catalogConfig)
        .add("name", name)
        .add("updateMask", updateMask)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
index 8ae58f6a5..f7f7dd752 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionLevelsRequest.java
@@ -14,9 +14,9 @@ public class GetPermissionLevelsRequest {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
- * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
- * serving-endpoints, or warehouses.
+ * clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
+ * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
index 801a423e4..b27841373 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
@@ -14,9 +14,9 @@ public class GetPermissionRequest {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
- * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
- * serving-endpoints, or warehouses.
+ * clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
+ * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
index 25ba32997..a30c83e0c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java
@@ -20,9 +20,9 @@ public class SetObjectPermissions {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
- * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
- * serving-endpoints, or warehouses.
+ * clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
+ * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
index b7ea0195f..2420a2d73 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java
@@ -20,9 +20,9 @@ public class UpdateObjectPermissions {
/**
* The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
- * clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
- * genie, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos,
- * serving-endpoints, or warehouses.
+ * clusters, cluster-policies, dashboards, database-projects, dbsql-dashboards, directories,
+ * experiments, files, genie, instance-pools, jobs, notebooks, pipelines, queries,
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
@JsonIgnore private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java
new file mode 100755
index 000000000..401986af0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeAssistantRequest.java
@@ -0,0 +1,45 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.knowledgeassistants;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Request to create a Knowledge Assistant. */
@Generated
public class CreateKnowledgeAssistantRequest {
  /** The Knowledge Assistant to create. */
  @JsonProperty("knowledge_assistant")
  private KnowledgeAssistant knowledgeAssistant;

  /** Sets the Knowledge Assistant to create; returns {@code this} for fluent chaining. */
  public CreateKnowledgeAssistantRequest setKnowledgeAssistant(
      KnowledgeAssistant knowledgeAssistant) {
    this.knowledgeAssistant = knowledgeAssistant;
    return this;
  }

  /** Returns the Knowledge Assistant to create. */
  public KnowledgeAssistant getKnowledgeAssistant() {
    return knowledgeAssistant;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    CreateKnowledgeAssistantRequest other = (CreateKnowledgeAssistantRequest) o;
    return Objects.equals(knowledgeAssistant, other.knowledgeAssistant);
  }

  @Override
  public int hashCode() {
    return Objects.hash(knowledgeAssistant);
  }

  @Override
  public String toString() {
    return new ToStringer(CreateKnowledgeAssistantRequest.class)
        .add("knowledgeAssistant", knowledgeAssistant)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java
new file mode 100755
index 000000000..da7e77346
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/CreateKnowledgeSourceRequest.java
@@ -0,0 +1,62 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.knowledgeassistants;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Request to create a Knowledge Source under a Knowledge Assistant. */
@Generated
public class CreateKnowledgeSourceRequest {
  /** */
  @JsonProperty("knowledge_source")
  private KnowledgeSource knowledgeSource;

  /**
   * Parent resource where this source will be created. Format:
   * knowledge-assistants/{knowledge_assistant_id}
   */
  @JsonIgnore private String parent;

  /** Sets the Knowledge Source to create; returns {@code this} for fluent chaining. */
  public CreateKnowledgeSourceRequest setKnowledgeSource(KnowledgeSource knowledgeSource) {
    this.knowledgeSource = knowledgeSource;
    return this;
  }

  /** Returns the Knowledge Source to create. */
  public KnowledgeSource getKnowledgeSource() {
    return knowledgeSource;
  }

  /** Sets the parent resource name; returns {@code this} for fluent chaining. */
  public CreateKnowledgeSourceRequest setParent(String parent) {
    this.parent = parent;
    return this;
  }

  /** Returns the parent resource name. */
  public String getParent() {
    return parent;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    CreateKnowledgeSourceRequest other = (CreateKnowledgeSourceRequest) o;
    return Objects.equals(knowledgeSource, other.knowledgeSource)
        && Objects.equals(parent, other.parent);
  }

  @Override
  public int hashCode() {
    return Objects.hash(knowledgeSource, parent);
  }

  @Override
  public String toString() {
    return new ToStringer(CreateKnowledgeSourceRequest.class)
        .add("knowledgeSource", knowledgeSource)
        .add("parent", parent)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java
new file mode 100755
index 000000000..60d4638a0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeAssistantRequest.java
@@ -0,0 +1,44 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.knowledgeassistants;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Request to delete a Knowledge Assistant. */
@Generated
public class DeleteKnowledgeAssistantRequest {
  /**
   * The resource name of the knowledge assistant to be deleted. Format:
   * knowledge-assistants/{knowledge_assistant_id}
   */
  @JsonIgnore private String name;

  /** Sets the resource name; returns {@code this} for fluent chaining. */
  public DeleteKnowledgeAssistantRequest setName(String name) {
    this.name = name;
    return this;
  }

  /** Returns the resource name. */
  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    DeleteKnowledgeAssistantRequest other = (DeleteKnowledgeAssistantRequest) o;
    return Objects.equals(name, other.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name);
  }

  @Override
  public String toString() {
    return new ToStringer(DeleteKnowledgeAssistantRequest.class).add("name", name).toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java
new file mode 100755
index 000000000..746581a50
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/DeleteKnowledgeSourceRequest.java
@@ -0,0 +1,44 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.knowledgeassistants;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Request to delete a Knowledge Source. */
@Generated
public class DeleteKnowledgeSourceRequest {
  /**
   * The resource name of the Knowledge Source to delete. Format:
   * knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id}
   */
  @JsonIgnore private String name;

  /** Sets the resource name; returns {@code this} for fluent chaining. */
  public DeleteKnowledgeSourceRequest setName(String name) {
    this.name = name;
    return this;
  }

  /** Returns the resource name. */
  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    DeleteKnowledgeSourceRequest other = (DeleteKnowledgeSourceRequest) o;
    return Objects.equals(name, other.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name);
  }

  @Override
  public String toString() {
    return new ToStringer(DeleteKnowledgeSourceRequest.class).add("name", name).toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java
new file mode 100755
index 000000000..18f65a692
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FileTableSpec.java
@@ -0,0 +1,59 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.knowledgeassistants;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** FileTableSpec specifies a file table source configuration. */
@Generated
public class FileTableSpec {
  /** The name of the column containing BINARY file content to be indexed. */
  @JsonProperty("file_col")
  private String fileCol;

  /** Full UC name of the table, in the format of {CATALOG}.{SCHEMA}.{TABLE_NAME}. */
  @JsonProperty("table_name")
  private String tableName;

  /** Sets the file-content column name; returns {@code this} for fluent chaining. */
  public FileTableSpec setFileCol(String fileCol) {
    this.fileCol = fileCol;
    return this;
  }

  /** Returns the file-content column name. */
  public String getFileCol() {
    return fileCol;
  }

  /** Sets the full UC table name; returns {@code this} for fluent chaining. */
  public FileTableSpec setTableName(String tableName) {
    this.tableName = tableName;
    return this;
  }

  /** Returns the full UC table name. */
  public String getTableName() {
    return tableName;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    FileTableSpec other = (FileTableSpec) o;
    return Objects.equals(fileCol, other.fileCol) && Objects.equals(tableName, other.tableName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(fileCol, tableName);
  }

  @Override
  public String toString() {
    return new ToStringer(FileTableSpec.class)
        .add("fileCol", fileCol)
        .add("tableName", tableName)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java
new file mode 100755
index 000000000..14de72a5c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/FilesSpec.java
@@ -0,0 +1,43 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.knowledgeassistants;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** FilesSpec specifies a files source configuration. */
@Generated
public class FilesSpec {
  /** A UC volume path that includes a list of files. */
  @JsonProperty("path")
  private String path;

  /** Sets the UC volume path; returns {@code this} for fluent chaining. */
  public FilesSpec setPath(String path) {
    this.path = path;
    return this;
  }

  /** Returns the UC volume path. */
  public String getPath() {
    return path;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    FilesSpec other = (FilesSpec) o;
    return Objects.equals(path, other.path);
  }

  @Override
  public int hashCode() {
    return Objects.hash(path);
  }

  @Override
  public String toString() {
    return new ToStringer(FilesSpec.class).add("path", path).toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java
new file mode 100755
index 000000000..e722347bf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeAssistantRequest.java
@@ -0,0 +1,44 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.knowledgeassistants;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Request to fetch a Knowledge Assistant. */
@Generated
public class GetKnowledgeAssistantRequest {
  /**
   * The resource name of the knowledge assistant. Format:
   * knowledge-assistants/{knowledge_assistant_id}
   */
  @JsonIgnore private String name;

  /** Sets the resource name; returns {@code this} for fluent chaining. */
  public GetKnowledgeAssistantRequest setName(String name) {
    this.name = name;
    return this;
  }

  /** Returns the resource name. */
  public String getName() {
    return name;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GetKnowledgeAssistantRequest other = (GetKnowledgeAssistantRequest) o;
    return Objects.equals(name, other.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name);
  }

  @Override
  public String toString() {
    return new ToStringer(GetKnowledgeAssistantRequest.class).add("name", name).toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java
new file mode 100755
index 000000000..268d77a0c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/GetKnowledgeSourceRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetKnowledgeSourceRequest {
+ /**
+ * The resource name of the Knowledge Source. Format:
+ * knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id}
+ */
+ @JsonIgnore private String name;
+
+ public GetKnowledgeSourceRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetKnowledgeSourceRequest that = (GetKnowledgeSourceRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetKnowledgeSourceRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java
new file mode 100755
index 000000000..a30bde9c6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/IndexSpec.java
@@ -0,0 +1,75 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** IndexSpec specifies a vector search index source configuration. */
+@Generated
+public class IndexSpec {
+ /** The column that specifies a link or reference to where the information came from. */
+ @JsonProperty("doc_uri_col")
+ private String docUriCol;
+
+ /** Full UC name of the vector search index, in the format of {CATALOG}.{SCHEMA}.{INDEX_NAME}. */
+ @JsonProperty("index_name")
+ private String indexName;
+
+ /** The column that includes the document text for retrieval. */
+ @JsonProperty("text_col")
+ private String textCol;
+
+ public IndexSpec setDocUriCol(String docUriCol) {
+ this.docUriCol = docUriCol;
+ return this;
+ }
+
+ public String getDocUriCol() {
+ return docUriCol;
+ }
+
+ public IndexSpec setIndexName(String indexName) {
+ this.indexName = indexName;
+ return this;
+ }
+
+ public String getIndexName() {
+ return indexName;
+ }
+
+ public IndexSpec setTextCol(String textCol) {
+ this.textCol = textCol;
+ return this;
+ }
+
+ public String getTextCol() {
+ return textCol;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ IndexSpec that = (IndexSpec) o;
+ return Objects.equals(docUriCol, that.docUriCol)
+ && Objects.equals(indexName, that.indexName)
+ && Objects.equals(textCol, that.textCol);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(docUriCol, indexName, textCol);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(IndexSpec.class)
+ .add("docUriCol", docUriCol)
+ .add("indexName", indexName)
+ .add("textCol", textCol)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java
new file mode 100755
index 000000000..b26ecda26
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistant.java
@@ -0,0 +1,223 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.Timestamp;
+import java.util.Objects;
+
+/**
+ * Entity message that represents a knowledge assistant. Note: REQUIRED annotations below represent
+ * create-time requirements. For updates, required fields are determined by the update mask.
+ */
+@Generated
+public class KnowledgeAssistant {
+ /** Creation timestamp. */
+ @JsonProperty("create_time")
+ private Timestamp createTime;
+
+ /** The creator of the Knowledge Assistant. */
+ @JsonProperty("creator")
+ private String creator;
+
+ /**
+ * Description of what this agent can do (user-facing). Required when creating a Knowledge
+ * Assistant. When updating a Knowledge Assistant, optional unless included in update_mask.
+ */
+ @JsonProperty("description")
+ private String description;
+
+ /**
+ * The display name of the Knowledge Assistant, unique at workspace level. Required when creating
+ * a Knowledge Assistant. When updating a Knowledge Assistant, optional unless included in
+ * update_mask.
+ */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** The name of the knowledge assistant agent endpoint. */
+ @JsonProperty("endpoint_name")
+ private String endpointName;
+
+ /** Error details when the Knowledge Assistant is in FAILED state. */
+ @JsonProperty("error_info")
+ private String errorInfo;
+
+ /** The MLflow experiment ID. */
+ @JsonProperty("experiment_id")
+ private String experimentId;
+
+ /** The universally unique identifier (UUID) of the Knowledge Assistant. */
+ @JsonProperty("id")
+ private String id;
+
+ /**
+ * Additional global instructions on how the agent should generate answers. Optional on create and
+ * update. When updating a Knowledge Assistant, include this field in update_mask to modify it.
+ */
+ @JsonProperty("instructions")
+ private String instructions;
+
+ /**
+ * The resource name of the Knowledge Assistant. Format:
+ * knowledge-assistants/{knowledge_assistant_id}
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /** State of the Knowledge Assistant. Not returned in List responses. */
+ @JsonProperty("state")
+ private KnowledgeAssistantState state;
+
+ public KnowledgeAssistant setCreateTime(Timestamp createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public Timestamp getCreateTime() {
+ return createTime;
+ }
+
+ public KnowledgeAssistant setCreator(String creator) {
+ this.creator = creator;
+ return this;
+ }
+
+ public String getCreator() {
+ return creator;
+ }
+
+ public KnowledgeAssistant setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public KnowledgeAssistant setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public KnowledgeAssistant setEndpointName(String endpointName) {
+ this.endpointName = endpointName;
+ return this;
+ }
+
+ public String getEndpointName() {
+ return endpointName;
+ }
+
+ public KnowledgeAssistant setErrorInfo(String errorInfo) {
+ this.errorInfo = errorInfo;
+ return this;
+ }
+
+ public String getErrorInfo() {
+ return errorInfo;
+ }
+
+ public KnowledgeAssistant setExperimentId(String experimentId) {
+ this.experimentId = experimentId;
+ return this;
+ }
+
+ public String getExperimentId() {
+ return experimentId;
+ }
+
+ public KnowledgeAssistant setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public KnowledgeAssistant setInstructions(String instructions) {
+ this.instructions = instructions;
+ return this;
+ }
+
+ public String getInstructions() {
+ return instructions;
+ }
+
+ public KnowledgeAssistant setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public KnowledgeAssistant setState(KnowledgeAssistantState state) {
+ this.state = state;
+ return this;
+ }
+
+ public KnowledgeAssistantState getState() {
+ return state;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistant that = (KnowledgeAssistant) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(creator, that.creator)
+ && Objects.equals(description, that.description)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(endpointName, that.endpointName)
+ && Objects.equals(errorInfo, that.errorInfo)
+ && Objects.equals(experimentId, that.experimentId)
+ && Objects.equals(id, that.id)
+ && Objects.equals(instructions, that.instructions)
+ && Objects.equals(name, that.name)
+ && Objects.equals(state, that.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createTime,
+ creator,
+ description,
+ displayName,
+ endpointName,
+ errorInfo,
+ experimentId,
+ id,
+ instructions,
+ name,
+ state);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistant.class)
+ .add("createTime", createTime)
+ .add("creator", creator)
+ .add("description", description)
+ .add("displayName", displayName)
+ .add("endpointName", endpointName)
+ .add("errorInfo", errorInfo)
+ .add("experimentId", experimentId)
+ .add("id", id)
+ .add("instructions", instructions)
+ .add("name", name)
+ .add("state", state)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java
new file mode 100755
index 000000000..3a47b2cbb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantState.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum KnowledgeAssistantState {
+ ACTIVE,
+ CREATING,
+ FAILED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java
new file mode 100755
index 000000000..47c062573
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsAPI.java
@@ -0,0 +1,129 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Manage Knowledge Assistants and related resources. */
+@Generated
+public class KnowledgeAssistantsAPI {
+  private static final Logger LOG = LoggerFactory.getLogger(KnowledgeAssistantsAPI.class);
+
+  private final KnowledgeAssistantsService impl;
+
+  /** Regular-use constructor */
+  public KnowledgeAssistantsAPI(ApiClient apiClient) {
+    impl = new KnowledgeAssistantsImpl(apiClient);
+  }
+
+  /** Constructor for mocks */
+  public KnowledgeAssistantsAPI(KnowledgeAssistantsService mock) {
+    impl = mock;
+  }
+
+  /** Creates a Knowledge Assistant. */
+  public KnowledgeAssistant createKnowledgeAssistant(CreateKnowledgeAssistantRequest request) {
+    return impl.createKnowledgeAssistant(request);
+  }
+
+  /** Creates a Knowledge Source under a Knowledge Assistant. */
+  public KnowledgeSource createKnowledgeSource(CreateKnowledgeSourceRequest request) {
+    return impl.createKnowledgeSource(request);
+  }
+
+  public void deleteKnowledgeAssistant(String name) {
+    deleteKnowledgeAssistant(new DeleteKnowledgeAssistantRequest().setName(name));
+  }
+
+  /** Deletes a Knowledge Assistant. */
+  public void deleteKnowledgeAssistant(DeleteKnowledgeAssistantRequest request) {
+    impl.deleteKnowledgeAssistant(request);
+  }
+
+  public void deleteKnowledgeSource(String name) {
+    deleteKnowledgeSource(new DeleteKnowledgeSourceRequest().setName(name));
+  }
+
+  /** Deletes a Knowledge Source. */
+  public void deleteKnowledgeSource(DeleteKnowledgeSourceRequest request) {
+    impl.deleteKnowledgeSource(request);
+  }
+
+  public KnowledgeAssistant getKnowledgeAssistant(String name) {
+    return getKnowledgeAssistant(new GetKnowledgeAssistantRequest().setName(name));
+  }
+
+  /** Gets a Knowledge Assistant. */
+  public KnowledgeAssistant getKnowledgeAssistant(GetKnowledgeAssistantRequest request) {
+    return impl.getKnowledgeAssistant(request);
+  }
+
+  public KnowledgeSource getKnowledgeSource(String name) {
+    return getKnowledgeSource(new GetKnowledgeSourceRequest().setName(name));
+  }
+
+  /** Gets a Knowledge Source. */
+  public KnowledgeSource getKnowledgeSource(GetKnowledgeSourceRequest request) {
+    return impl.getKnowledgeSource(request);
+  }
+
+  /** List Knowledge Assistants */
+  public Iterable<KnowledgeAssistant> listKnowledgeAssistants(
+      ListKnowledgeAssistantsRequest request) {
+    return new Paginator<>(
+        request,
+        impl::listKnowledgeAssistants,
+        ListKnowledgeAssistantsResponse::getKnowledgeAssistants,
+        response -> {
+          String token = response.getNextPageToken();
+          if (token == null || token.isEmpty()) {
+            return null;
+          }
+          return request.setPageToken(token);
+        });
+  }
+
+  public Iterable<KnowledgeSource> listKnowledgeSources(String parent) {
+    return listKnowledgeSources(new ListKnowledgeSourcesRequest().setParent(parent));
+  }
+
+  /** Lists Knowledge Sources under a Knowledge Assistant. */
+  public Iterable<KnowledgeSource> listKnowledgeSources(ListKnowledgeSourcesRequest request) {
+    return new Paginator<>(
+        request,
+        impl::listKnowledgeSources,
+        ListKnowledgeSourcesResponse::getKnowledgeSources,
+        response -> {
+          String token = response.getNextPageToken();
+          if (token == null || token.isEmpty()) {
+            return null;
+          }
+          return request.setPageToken(token);
+        });
+  }
+
+  /**
+   * Sync all non-index Knowledge Sources for a Knowledge Assistant (index sources do not require
+   * sync)
+   */
+  public void syncKnowledgeSources(SyncKnowledgeSourcesRequest request) {
+    impl.syncKnowledgeSources(request);
+  }
+
+  /** Updates a Knowledge Assistant. */
+  public KnowledgeAssistant updateKnowledgeAssistant(UpdateKnowledgeAssistantRequest request) {
+    return impl.updateKnowledgeAssistant(request);
+  }
+
+  /** Updates a Knowledge Source. */
+  public KnowledgeSource updateKnowledgeSource(UpdateKnowledgeSourceRequest request) {
+    return impl.updateKnowledgeSource(request);
+  }
+
+  public KnowledgeAssistantsService impl() {
+    return impl;
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java
new file mode 100755
index 000000000..14747a16d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsImpl.java
@@ -0,0 +1,212 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of KnowledgeAssistants */
+@Generated
+class KnowledgeAssistantsImpl implements KnowledgeAssistantsService {
+ private final ApiClient apiClient;
+
+ public KnowledgeAssistantsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public KnowledgeAssistant createKnowledgeAssistant(CreateKnowledgeAssistantRequest request) {
+ String path = "/api/2.1/knowledge-assistants";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getKnowledgeAssistant()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeAssistant.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public KnowledgeSource createKnowledgeSource(CreateKnowledgeSourceRequest request) {
+ String path = String.format("/api/2.1/%s/knowledge-sources", request.getParent());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getKnowledgeSource()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeSource.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteKnowledgeAssistant(DeleteKnowledgeAssistantRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteKnowledgeSource(DeleteKnowledgeSourceRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public KnowledgeAssistant getKnowledgeAssistant(GetKnowledgeAssistantRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeAssistant.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public KnowledgeSource getKnowledgeSource(GetKnowledgeSourceRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeSource.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListKnowledgeAssistantsResponse listKnowledgeAssistants(
+ ListKnowledgeAssistantsRequest request) {
+ String path = "/api/2.1/knowledge-assistants";
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, ListKnowledgeAssistantsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListKnowledgeSourcesResponse listKnowledgeSources(ListKnowledgeSourcesRequest request) {
+ String path = String.format("/api/2.1/%s/knowledge-sources", request.getParent());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, ListKnowledgeSourcesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void syncKnowledgeSources(SyncKnowledgeSourcesRequest request) {
+ String path = String.format("/api/2.1/%s/knowledge-sources:sync", request.getName());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public KnowledgeAssistant updateKnowledgeAssistant(UpdateKnowledgeAssistantRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req =
+ new Request("PATCH", path, apiClient.serialize(request.getKnowledgeAssistant()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeAssistant.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public KnowledgeSource updateKnowledgeSource(UpdateKnowledgeSourceRequest request) {
+ String path = String.format("/api/2.1/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getKnowledgeSource()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, KnowledgeSource.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java
new file mode 100755
index 000000000..cb70f6b10
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeAssistantsService.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Manage Knowledge Assistants and related resources.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface KnowledgeAssistantsService {
+ /** Creates a Knowledge Assistant. */
+ KnowledgeAssistant createKnowledgeAssistant(
+ CreateKnowledgeAssistantRequest createKnowledgeAssistantRequest);
+
+ /** Creates a Knowledge Source under a Knowledge Assistant. */
+ KnowledgeSource createKnowledgeSource(CreateKnowledgeSourceRequest createKnowledgeSourceRequest);
+
+ /** Deletes a Knowledge Assistant. */
+ void deleteKnowledgeAssistant(DeleteKnowledgeAssistantRequest deleteKnowledgeAssistantRequest);
+
+ /** Deletes a Knowledge Source. */
+ void deleteKnowledgeSource(DeleteKnowledgeSourceRequest deleteKnowledgeSourceRequest);
+
+ /** Gets a Knowledge Assistant. */
+ KnowledgeAssistant getKnowledgeAssistant(
+ GetKnowledgeAssistantRequest getKnowledgeAssistantRequest);
+
+ /** Gets a Knowledge Source. */
+ KnowledgeSource getKnowledgeSource(GetKnowledgeSourceRequest getKnowledgeSourceRequest);
+
+ /** List Knowledge Assistants */
+ ListKnowledgeAssistantsResponse listKnowledgeAssistants(
+ ListKnowledgeAssistantsRequest listKnowledgeAssistantsRequest);
+
+ /** Lists Knowledge Sources under a Knowledge Assistant. */
+ ListKnowledgeSourcesResponse listKnowledgeSources(
+ ListKnowledgeSourcesRequest listKnowledgeSourcesRequest);
+
+ /**
+ * Sync all non-index Knowledge Sources for a Knowledge Assistant (index sources do not require
+ * sync)
+ */
+ void syncKnowledgeSources(SyncKnowledgeSourcesRequest syncKnowledgeSourcesRequest);
+
+ /** Updates a Knowledge Assistant. */
+ KnowledgeAssistant updateKnowledgeAssistant(
+ UpdateKnowledgeAssistantRequest updateKnowledgeAssistantRequest);
+
+ /** Updates a Knowledge Source. */
+ KnowledgeSource updateKnowledgeSource(UpdateKnowledgeSourceRequest updateKnowledgeSourceRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java
new file mode 100755
index 000000000..705511930
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSource.java
@@ -0,0 +1,227 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.Timestamp;
+import java.util.Objects;
+
+/**
+ * KnowledgeSource represents a source of knowledge for the KnowledgeAssistant. Used in
+ * create/update requests and returned in Get/List responses. Note: REQUIRED annotations below
+ * represent create-time requirements. For updates, required fields are determined by the update
+ * mask.
+ */
+@Generated
+public class KnowledgeSource {
+ /** Timestamp when this knowledge source was created. */
+ @JsonProperty("create_time")
+ private Timestamp createTime;
+
+ /**
+ * Description of the knowledge source. Required when creating a Knowledge Source. When updating a
+ * Knowledge Source, optional unless included in update_mask.
+ */
+ @JsonProperty("description")
+ private String description;
+
+ /**
+ * Human-readable display name of the knowledge source. Required when creating a Knowledge Source.
+ * When updating a Knowledge Source, optional unless included in update_mask.
+ */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** */
+ @JsonProperty("file_table")
+ private FileTableSpec fileTable;
+
+ /** */
+ @JsonProperty("files")
+ private FilesSpec files;
+
+ /** */
+ @JsonProperty("id")
+ private String id;
+
+ /** */
+ @JsonProperty("index")
+ private IndexSpec index;
+
+ /**
+ * Timestamp representing the cutoff before which content in this knowledge source is being
+ * ingested.
+ */
+ @JsonProperty("knowledge_cutoff_time")
+ private Timestamp knowledgeCutoffTime;
+
+ /**
+ * Full resource name:
+ * knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id}
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * The type of the source: "index", "files", or "file_table". Required when creating a Knowledge
+ * Source. When updating a Knowledge Source, this field is ignored.
+ */
+ @JsonProperty("source_type")
+ private String sourceType;
+
+ /** */
+ @JsonProperty("state")
+ private KnowledgeSourceState state;
+
+ public KnowledgeSource setCreateTime(Timestamp createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public Timestamp getCreateTime() {
+ return createTime;
+ }
+
+ public KnowledgeSource setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public KnowledgeSource setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public KnowledgeSource setFileTable(FileTableSpec fileTable) {
+ this.fileTable = fileTable;
+ return this;
+ }
+
+ public FileTableSpec getFileTable() {
+ return fileTable;
+ }
+
+ public KnowledgeSource setFiles(FilesSpec files) {
+ this.files = files;
+ return this;
+ }
+
+ public FilesSpec getFiles() {
+ return files;
+ }
+
+ public KnowledgeSource setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public KnowledgeSource setIndex(IndexSpec index) {
+ this.index = index;
+ return this;
+ }
+
+ public IndexSpec getIndex() {
+ return index;
+ }
+
+ public KnowledgeSource setKnowledgeCutoffTime(Timestamp knowledgeCutoffTime) {
+ this.knowledgeCutoffTime = knowledgeCutoffTime;
+ return this;
+ }
+
+ public Timestamp getKnowledgeCutoffTime() {
+ return knowledgeCutoffTime;
+ }
+
+ public KnowledgeSource setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public KnowledgeSource setSourceType(String sourceType) {
+ this.sourceType = sourceType;
+ return this;
+ }
+
+ public String getSourceType() {
+ return sourceType;
+ }
+
+ public KnowledgeSource setState(KnowledgeSourceState state) {
+ this.state = state;
+ return this;
+ }
+
+ public KnowledgeSourceState getState() {
+ return state;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeSource that = (KnowledgeSource) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(description, that.description)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(fileTable, that.fileTable)
+ && Objects.equals(files, that.files)
+ && Objects.equals(id, that.id)
+ && Objects.equals(index, that.index)
+ && Objects.equals(knowledgeCutoffTime, that.knowledgeCutoffTime)
+ && Objects.equals(name, that.name)
+ && Objects.equals(sourceType, that.sourceType)
+ && Objects.equals(state, that.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createTime,
+ description,
+ displayName,
+ fileTable,
+ files,
+ id,
+ index,
+ knowledgeCutoffTime,
+ name,
+ sourceType,
+ state);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeSource.class)
+ .add("createTime", createTime)
+ .add("description", description)
+ .add("displayName", displayName)
+ .add("fileTable", fileTable)
+ .add("files", files)
+ .add("id", id)
+ .add("index", index)
+ .add("knowledgeCutoffTime", knowledgeCutoffTime)
+ .add("name", name)
+ .add("sourceType", sourceType)
+ .add("state", state)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java
new file mode 100755
index 000000000..f831c5d19
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/KnowledgeSourceState.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum KnowledgeSourceState {
+ FAILED_UPDATE,
+ UPDATED,
+ UPDATING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java
new file mode 100755
index 000000000..228407b6f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsRequest.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListKnowledgeAssistantsRequest {
+ /**
+ * The maximum number of knowledge assistants to return. If unspecified, at most 100 knowledge
+ * assistants will be returned. The maximum value is 100; values above 100 will be coerced to 100.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous `ListKnowledgeAssistants` call. Provide this to retrieve
+ * the subsequent page. If unspecified, the first page will be returned.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListKnowledgeAssistantsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListKnowledgeAssistantsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListKnowledgeAssistantsRequest that = (ListKnowledgeAssistantsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListKnowledgeAssistantsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java
new file mode 100755
index 000000000..ec55ca4b4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeAssistantsResponse.java
@@ -0,0 +1,65 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** A list of Knowledge Assistants. */
+@Generated
+public class ListKnowledgeAssistantsResponse {
+ /** */
+ @JsonProperty("knowledge_assistants")
+ private Collection<KnowledgeAssistant> knowledgeAssistants;
+
+ /**
+ * A token that can be sent as `page_token` to retrieve the next page. If this field is omitted,
+ * there are no subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListKnowledgeAssistantsResponse setKnowledgeAssistants(
+ Collection<KnowledgeAssistant> knowledgeAssistants) {
+ this.knowledgeAssistants = knowledgeAssistants;
+ return this;
+ }
+
+ public Collection<KnowledgeAssistant> getKnowledgeAssistants() {
+ return knowledgeAssistants;
+ }
+
+ public ListKnowledgeAssistantsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListKnowledgeAssistantsResponse that = (ListKnowledgeAssistantsResponse) o;
+ return Objects.equals(knowledgeAssistants, that.knowledgeAssistants)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeAssistants, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListKnowledgeAssistantsResponse.class)
+ .add("knowledgeAssistants", knowledgeAssistants)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesRequest.java
new file mode 100755
index 000000000..9488a1268
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListKnowledgeSourcesRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** Parent resource to list from. Format: knowledge-assistants/{knowledge_assistant_id} */
+ @JsonIgnore private String parent;
+
+ public ListKnowledgeSourcesRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListKnowledgeSourcesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListKnowledgeSourcesRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListKnowledgeSourcesRequest that = (ListKnowledgeSourcesRequest) o;
+ return Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(parent, that.parent);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken, parent);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListKnowledgeSourcesRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("parent", parent)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesResponse.java
new file mode 100755
index 000000000..e4cb8b24d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/ListKnowledgeSourcesResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListKnowledgeSourcesResponse {
+ /** */
+ @JsonProperty("knowledge_sources")
+ private Collection<KnowledgeSource> knowledgeSources;
+
+ /** */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListKnowledgeSourcesResponse setKnowledgeSources(
+ Collection<KnowledgeSource> knowledgeSources) {
+ this.knowledgeSources = knowledgeSources;
+ return this;
+ }
+
+ public Collection<KnowledgeSource> getKnowledgeSources() {
+ return knowledgeSources;
+ }
+
+ public ListKnowledgeSourcesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListKnowledgeSourcesResponse that = (ListKnowledgeSourcesResponse) o;
+ return Objects.equals(knowledgeSources, that.knowledgeSources)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeSources, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListKnowledgeSourcesResponse.class)
+ .add("knowledgeSources", knowledgeSources)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/SyncKnowledgeSourcesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/SyncKnowledgeSourcesRequest.java
new file mode 100755
index 000000000..4cd8ab302
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/SyncKnowledgeSourcesRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class SyncKnowledgeSourcesRequest {
+ /**
+ * The resource name of the Knowledge Assistant. Format:
+ * knowledge-assistants/{knowledge_assistant_id}
+ */
+ @JsonIgnore private String name;
+
+ public SyncKnowledgeSourcesRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SyncKnowledgeSourcesRequest that = (SyncKnowledgeSourcesRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SyncKnowledgeSourcesRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeAssistantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeAssistantRequest.java
new file mode 100755
index 000000000..f2ef10436
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeAssistantRequest.java
@@ -0,0 +1,89 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateKnowledgeAssistantRequest {
+ /**
+ * The Knowledge Assistant update payload. Only fields listed in update_mask are updated. REQUIRED
+ * annotations on Knowledge Assistant fields describe create-time requirements and do not mean all
+ * those fields are required for update.
+ */
+ @JsonProperty("knowledge_assistant")
+ private KnowledgeAssistant knowledgeAssistant;
+
+ /**
+ * The resource name of the Knowledge Assistant. Format:
+ * knowledge-assistants/{knowledge_assistant_id}
+ */
+ @JsonIgnore private String name;
+
+ /**
+ * Comma-delimited list of fields to update on the Knowledge Assistant. Allowed values:
+ * `display_name`, `description`, `instructions`. Examples: - `display_name` -
+ * `description,instructions`
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateKnowledgeAssistantRequest setKnowledgeAssistant(
+ KnowledgeAssistant knowledgeAssistant) {
+ this.knowledgeAssistant = knowledgeAssistant;
+ return this;
+ }
+
+ public KnowledgeAssistant getKnowledgeAssistant() {
+ return knowledgeAssistant;
+ }
+
+ public UpdateKnowledgeAssistantRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateKnowledgeAssistantRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateKnowledgeAssistantRequest that = (UpdateKnowledgeAssistantRequest) o;
+ return Objects.equals(knowledgeAssistant, that.knowledgeAssistant)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeAssistant, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateKnowledgeAssistantRequest.class)
+ .add("knowledgeAssistant", knowledgeAssistant)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeSourceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeSourceRequest.java
new file mode 100755
index 000000000..1750025e8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/knowledgeassistants/UpdateKnowledgeSourceRequest.java
@@ -0,0 +1,87 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.knowledgeassistants;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateKnowledgeSourceRequest {
+ /**
+ * The Knowledge Source update payload. Only fields listed in update_mask are updated. REQUIRED
+ * annotations on Knowledge Source fields describe create-time requirements and do not mean all
+ * those fields are required for update.
+ */
+ @JsonProperty("knowledge_source")
+ private KnowledgeSource knowledgeSource;
+
+ /**
+ * The resource name of the Knowledge Source to update. Format:
+ * knowledge-assistants/{knowledge_assistant_id}/knowledge-sources/{knowledge_source_id}
+ */
+ @JsonIgnore private String name;
+
+ /**
+ * Comma-delimited list of fields to update on the Knowledge Source. Allowed values:
+ * `display_name`, `description`. Examples: - `display_name` - `display_name,description`
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateKnowledgeSourceRequest setKnowledgeSource(KnowledgeSource knowledgeSource) {
+ this.knowledgeSource = knowledgeSource;
+ return this;
+ }
+
+ public KnowledgeSource getKnowledgeSource() {
+ return knowledgeSource;
+ }
+
+ public UpdateKnowledgeSourceRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateKnowledgeSourceRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateKnowledgeSourceRequest that = (UpdateKnowledgeSourceRequest) o;
+ return Objects.equals(knowledgeSource, that.knowledgeSource)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeSource, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateKnowledgeSourceRequest.class)
+ .add("knowledgeSource", knowledgeSource)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeltaTableSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeltaTableSource.java
index 421e6aabe..4b095a4dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeltaTableSource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeltaTableSource.java
@@ -18,7 +18,10 @@ public class DeltaTableSource {
@JsonProperty("dataframe_schema")
private String dataframeSchema;
- /** The entity columns of the Delta table. */
+ /**
+ * Deprecated: Use Feature.entity instead. Kept for backwards compatibility. The entity columns of
+ * the Delta table.
+ */
@JsonProperty("entity_columns")
private Collection<String> entityColumns;
@@ -33,7 +36,10 @@ public class DeltaTableSource {
@JsonProperty("full_name")
private String fullName;
- /** The timeseries column of the Delta table. */
+ /**
+ * Deprecated: Use Feature.timeseries_column instead. Kept for backwards compatibility. The
+ * timeseries column of the Delta table.
+ */
@JsonProperty("timeseries_column")
private String timeseriesColumn;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java
index e93ad03a4..ad4bd5f41 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java
@@ -14,7 +14,11 @@ public class Feature {
@JsonProperty("description")
private String description;
- /** The filter condition applied to the source data before aggregation. */
+ /**
+ * Deprecated: Use DeltaTableSource.filter_condition or KafkaSource.filter_condition instead. Kept
+ * for backwards compatibility. The filter condition applied to the source data before
+ * aggregation.
+ */
@JsonProperty("filter_condition")
private String filterCondition;
@@ -26,16 +30,19 @@ public class Feature {
@JsonProperty("function")
private Function function;
- /** The input columns from which the feature is computed. */
+ /**
+ * Deprecated: Use AggregationFunction.inputs instead. Kept for backwards compatibility. The input
+ * columns from which the feature is computed.
+ */
@JsonProperty("inputs")
private Collection<String> inputs;
/**
- * WARNING: This field is primarily intended for internal use by Databricks systems and is
- * automatically populated when features are created through Databricks notebooks or jobs. Users
- * should not manually set this field as incorrect values may lead to inaccurate lineage tracking
- * or unexpected behavior. This field will be set by feature-engineering client and should be left
- * unset by SDK and terraform users.
+ * Lineage context information for this feature. WARNING: This field is primarily intended for
+ * internal use by Databricks systems and is automatically populated when features are created
+ * through Databricks notebooks or jobs. Users should not manually set this field as incorrect
+ * values may lead to inaccurate lineage tracking or unexpected behavior. This field will be set
+ * by feature-engineering client and should be left unset by SDK and terraform users.
*/
@JsonProperty("lineage_context")
private LineageContext lineageContext;
@@ -44,7 +51,10 @@ public class Feature {
@JsonProperty("source")
private DataSource source;
- /** The time window in which the feature is computed. */
+ /**
+ * Deprecated: Use Function.aggregation_function.time_window instead. Kept for backwards
+ * compatibility. The time window in which the feature is computed.
+ */
@JsonProperty("time_window")
private TimeWindow timeWindow;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Function.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Function.java
index 06d4390f5..6f9c89bca 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Function.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Function.java
@@ -10,11 +10,17 @@
@Generated
public class Function {
- /** Extra parameters for parameterized functions. */
+ /**
+ * Deprecated: Use the function oneof with AggregationFunction instead. Kept for backwards
+ * compatibility. Extra parameters for parameterized functions.
+ */
@JsonProperty("extra_parameters")
private Collection<FunctionExtraParameter> extraParameters;
- /** The type of the function. */
+ /**
+ * Deprecated: Use the function oneof with AggregationFunction instead. Kept for backwards
+ * compatibility. The type of the function.
+ */
@JsonProperty("function_type")
private FunctionFunctionType functionType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FunctionExtraParameter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FunctionExtraParameter.java
index 7e8b864dd..bb13d2365 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FunctionExtraParameter.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FunctionExtraParameter.java
@@ -7,6 +7,11 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/**
+ * Deprecated: Use typed fields on function-specific messages (e.g.
+ * ApproxPercentileFunction.percentile) or AggregationFunction.ExtraParameter instead. Kept for
+ * backwards compatibility.
+ */
@Generated
public class FunctionExtraParameter {
/** The name of the parameter. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FunctionFunctionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FunctionFunctionType.java
index af97f99b8..edf0921ed 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FunctionFunctionType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FunctionFunctionType.java
@@ -4,6 +4,10 @@
import com.databricks.sdk.support.Generated;
+/**
+ * Deprecated: Use the function-specific messages in AggregationFunction.function_type oneof
+ * instead. Kept for backwards compatibility.
+ */
@Generated
public enum FunctionFunctionType {
APPROX_COUNT_DISTINCT,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaSource.java
index 57036af65..62b5fa168 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaSource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaSource.java
@@ -10,7 +10,10 @@
@Generated
public class KafkaSource {
- /** The entity column identifiers of the Kafka source. */
+ /**
+ * Deprecated: Use Feature.entity instead. Kept for backwards compatibility. The entity column
+ * identifiers of the Kafka source.
+ */
@JsonProperty("entity_column_identifiers")
private Collection<ColumnIdentifier> entityColumnIdentifiers;
@@ -21,7 +24,10 @@ public class KafkaSource {
@JsonProperty("name")
private String name;
- /** The timeseries column identifier of the Kafka source. */
+ /**
+ * Deprecated: Use Feature.timeseries_column instead. Kept for backwards compatibility. The
+ * timeseries column identifier of the Kafka source.
+ */
@JsonProperty("timeseries_column_identifier")
private ColumnIdentifier timeseriesColumnIdentifier;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java
index c6d63c761..a4dd8def2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java
@@ -17,7 +17,7 @@ public class DeleteTagAssignmentRequest {
/**
* The type of entity to which the tag is assigned. Allowed values are apps, dashboards,
- * geniespaces
+ * geniespaces, notebooks
*/
@JsonIgnore private String entityType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java
index dc1707406..05227b7e1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/GetTagAssignmentRequest.java
@@ -17,7 +17,7 @@ public class GetTagAssignmentRequest {
/**
* The type of entity to which the tag is assigned. Allowed values are apps, dashboards,
- * geniespaces
+ * geniespaces, notebooks
*/
@JsonIgnore private String entityType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java
index 103a7b10c..926202986 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/ListTagAssignmentsRequest.java
@@ -18,7 +18,7 @@ public class ListTagAssignmentsRequest {
/**
* The type of entity to which the tag is assigned. Allowed values are apps, dashboards,
- * geniespaces
+ * geniespaces, notebooks
*/
@JsonIgnore private String entityType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java
index 10b567652..2cddc0d88 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagAssignment.java
@@ -18,7 +18,7 @@ public class TagAssignment {
/**
* The type of entity to which the tag is assigned. Allowed values are apps, dashboards,
- * geniespaces
+ * geniespaces, notebooks
*/
@JsonProperty("entity_type")
private String entityType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java
index f25f26f9b..21f6e0c33 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/UpdateTagAssignmentRequest.java
@@ -19,7 +19,7 @@ public class UpdateTagAssignmentRequest {
/**
* The type of entity to which the tag is assigned. Allowed values are apps, dashboards,
- * geniespaces
+ * geniespaces, notebooks
*/
@JsonIgnore private String entityType;