Skip to content

Commit 1a0b28f

Browse files
Update SDK to d6ecfb0633332a524f52f6ab319b073dd3f7493e
1 parent b6c97c1 commit 1a0b28f

File tree

14 files changed

+272
-93
lines changed

14 files changed

+272
-93
lines changed

.codegen/_openapi_sha

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
5757e4a5f208a1f416f8f94b00febb3118fdb940
1+
d6ecfb0633332a524f52f6ab319b073dd3f7493e

.gitattributes

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1980,6 +1980,7 @@
19801980
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true
19811981
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true
19821982
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true
1983+
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullRefreshPolicy.java linguist-generated=true
19831984
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java linguist-generated=true
19841985
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineResponse.java linguist-generated=true
19851986
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true
@@ -2021,6 +2022,7 @@
20212022
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/MaturityLevel.java linguist-generated=true
20222023
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java linguist-generated=true
20232024
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java linguist-generated=true
2025+
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OperationTimeWindow.java linguist-generated=true
20242026
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java linguist-generated=true
20252027
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java linguist-generated=true
20262028
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java linguist-generated=true
@@ -2095,6 +2097,7 @@
20952097
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleOperation.java linguist-generated=true
20962098
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleRequest.java linguist-generated=true
20972099
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java linguist-generated=true
2100+
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointHosts.java linguist-generated=true
20982101
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java linguist-generated=true
20992102
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java linguist-generated=true
21002103
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java linguist-generated=true

NEXT_CHANGELOG.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,3 +13,15 @@
1313
### Internal Changes
1414

1515
### API Changes
16+
* Add `fullRefreshWindow` field for `com.databricks.sdk.service.pipelines.IngestionPipelineDefinition`.
17+
* Add `autoFullRefreshPolicy` field for `com.databricks.sdk.service.pipelines.TableSpecificConfig`.
18+
* Add `hosts` field for `com.databricks.sdk.service.postgres.EndpointStatus`.
19+
* Add `ENDPOINT_TYPE_READ_WRITE` and `ENDPOINT_TYPE_READ_ONLY` enum values for `com.databricks.sdk.service.postgres.EndpointType`.
20+
* Add `DELETED` enum value for `com.databricks.sdk.service.vectorsearch.EndpointStatusState`.
21+
* [Breaking] Change `createBranch()`, `createEndpoint()` and `createProject()` methods for `workspaceClient.postgres()` service with new required argument order.
22+
* Change `branchId` field for `com.databricks.sdk.service.postgres.CreateBranchRequest` to no longer be required.
23+
* Change `endpointId` field for `com.databricks.sdk.service.postgres.CreateEndpointRequest` to no longer be required.
24+
* Change `projectId` field for `com.databricks.sdk.service.postgres.CreateProjectRequest` to no longer be required.
25+
* [Breaking] Remove `host`, `lastActiveTime`, `startTime` and `suspendTime` fields for `com.databricks.sdk.service.postgres.EndpointStatus`.
26+
* [Breaking] Remove `computeLastActiveTime` field for `com.databricks.sdk.service.postgres.ProjectStatus`.
27+
* [Breaking] Remove `READ_WRITE` and `READ_ONLY` enum values for `com.databricks.sdk.service.postgres.EndpointType`.
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.pipelines;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Policy controlling automatic full refresh of an ingested table. */
@Generated
public class AutoFullRefreshPolicy {
  /** (Required, Mutable) Whether auto full refresh is turned on. */
  @JsonProperty("enabled")
  private Boolean enabled;

  /**
   * (Optional, Mutable) Minimum interval, in hours, between the timestamp at which a table was last
   * fully refreshed and the current timestamp before an auto full refresh may be triggered. When
   * unspecified and auto full refresh is enabled, the default is 24 hours.
   */
  @JsonProperty("min_interval_hours")
  private Long minIntervalHours;

  public Boolean getEnabled() {
    return enabled;
  }

  public AutoFullRefreshPolicy setEnabled(Boolean enabled) {
    this.enabled = enabled;
    return this;
  }

  public Long getMinIntervalHours() {
    return minIntervalHours;
  }

  public AutoFullRefreshPolicy setMinIntervalHours(Long minIntervalHours) {
    this.minIntervalHours = minIntervalHours;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    AutoFullRefreshPolicy other = (AutoFullRefreshPolicy) o;
    return Objects.equals(enabled, other.enabled)
        && Objects.equals(minIntervalHours, other.minIntervalHours);
  }

  @Override
  public int hashCode() {
    return Objects.hash(enabled, minIntervalHours);
  }

  @Override
  public String toString() {
    return new ToStringer(AutoFullRefreshPolicy.class)
        .add("enabled", enabled)
        .add("minIntervalHours", minIntervalHours)
        .toString();
  }
}

databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java

100755100644
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,10 @@ public class IngestionPipelineDefinition {
1717
@JsonProperty("connection_name")
1818
private String connectionName;
1919

20+
/** (Optional) A window that specifies a set of time ranges for snapshot queries in CDC. */
21+
@JsonProperty("full_refresh_window")
22+
private OperationTimeWindow fullRefreshWindow;
23+
2024
/**
2125
* Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs
2226
* directly without the need to specify a UC connection or ingestion gateway. The `source_catalog`
@@ -73,6 +77,15 @@ public String getConnectionName() {
7377
return connectionName;
7478
}
7579

80+
public IngestionPipelineDefinition setFullRefreshWindow(OperationTimeWindow fullRefreshWindow) {
81+
this.fullRefreshWindow = fullRefreshWindow;
82+
return this;
83+
}
84+
85+
public OperationTimeWindow getFullRefreshWindow() {
86+
return fullRefreshWindow;
87+
}
88+
7689
public IngestionPipelineDefinition setIngestFromUcForeignCatalog(
7790
Boolean ingestFromUcForeignCatalog) {
7891
this.ingestFromUcForeignCatalog = ingestFromUcForeignCatalog;
@@ -144,6 +157,7 @@ public boolean equals(Object o) {
144157
if (o == null || getClass() != o.getClass()) return false;
145158
IngestionPipelineDefinition that = (IngestionPipelineDefinition) o;
146159
return Objects.equals(connectionName, that.connectionName)
160+
&& Objects.equals(fullRefreshWindow, that.fullRefreshWindow)
147161
&& Objects.equals(ingestFromUcForeignCatalog, that.ingestFromUcForeignCatalog)
148162
&& Objects.equals(ingestionGatewayId, that.ingestionGatewayId)
149163
&& Objects.equals(netsuiteJarPath, that.netsuiteJarPath)
@@ -157,6 +171,7 @@ public boolean equals(Object o) {
157171
public int hashCode() {
158172
return Objects.hash(
159173
connectionName,
174+
fullRefreshWindow,
160175
ingestFromUcForeignCatalog,
161176
ingestionGatewayId,
162177
netsuiteJarPath,
@@ -170,6 +185,7 @@ public int hashCode() {
170185
public String toString() {
171186
return new ToStringer(IngestionPipelineDefinition.class)
172187
.add("connectionName", connectionName)
188+
.add("fullRefreshWindow", fullRefreshWindow)
173189
.add("ingestFromUcForeignCatalog", ingestFromUcForeignCatalog)
174190
.add("ingestionGatewayId", ingestionGatewayId)
175191
.add("netsuiteJarPath", netsuiteJarPath)
Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.pipelines;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/** Proto representing a window */
@Generated
public class OperationTimeWindow {
  /**
   * Days of the week on which the window may occur. When not specified, all days of the week are
   * used.
   */
  @JsonProperty("days_of_week")
  private Collection<DayOfWeek> daysOfWeek;

  /** Start hour of the window in the 24-hour day; an integer between 0 and 23. */
  @JsonProperty("start_hour")
  private Long startHour;

  /**
   * Time zone id of the window. See
   * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
   * for details. Defaults to UTC when not specified.
   */
  @JsonProperty("time_zone_id")
  private String timeZoneId;

  public Collection<DayOfWeek> getDaysOfWeek() {
    return daysOfWeek;
  }

  public OperationTimeWindow setDaysOfWeek(Collection<DayOfWeek> daysOfWeek) {
    this.daysOfWeek = daysOfWeek;
    return this;
  }

  public Long getStartHour() {
    return startHour;
  }

  public OperationTimeWindow setStartHour(Long startHour) {
    this.startHour = startHour;
    return this;
  }

  public String getTimeZoneId() {
    return timeZoneId;
  }

  public OperationTimeWindow setTimeZoneId(String timeZoneId) {
    this.timeZoneId = timeZoneId;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    OperationTimeWindow other = (OperationTimeWindow) o;
    return Objects.equals(daysOfWeek, other.daysOfWeek)
        && Objects.equals(startHour, other.startHour)
        && Objects.equals(timeZoneId, other.timeZoneId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(daysOfWeek, startHour, timeZoneId);
  }

  @Override
  public String toString() {
    return new ToStringer(OperationTimeWindow.class)
        .add("daysOfWeek", daysOfWeek)
        .add("startHour", startHour)
        .add("timeZoneId", timeZoneId)
        .toString();
  }
}

databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,16 @@
1010

1111
@Generated
1212
public class TableSpecificConfig {
13+
/**
14+
* (Optional, Mutable) Policy for auto full refresh, if enabled pipeline will automatically try to
15+
* fix issues by doing a full refresh on the table in the retry run. auto_full_refresh_policy in
16+
* table configuration will override the above level auto_full_refresh_policy. For example, {
17+
* "auto_full_refresh_policy": { "enabled": true, "min_interval_hours": 23, } } If unspecified,
18+
* auto full refresh is disabled.
19+
*/
20+
@JsonProperty("auto_full_refresh_policy")
21+
private AutoFullRefreshPolicy autoFullRefreshPolicy;
22+
1323
/**
1424
* A list of column names to be excluded for the ingestion. When not specified, include_columns
1525
* fully controls what columns to be ingested. When specified, all other columns including future
@@ -66,6 +76,15 @@ public class TableSpecificConfig {
6676
@JsonProperty("workday_report_parameters")
6777
private IngestionPipelineDefinitionWorkdayReportParameters workdayReportParameters;
6878

79+
public TableSpecificConfig setAutoFullRefreshPolicy(AutoFullRefreshPolicy autoFullRefreshPolicy) {
80+
this.autoFullRefreshPolicy = autoFullRefreshPolicy;
81+
return this;
82+
}
83+
84+
public AutoFullRefreshPolicy getAutoFullRefreshPolicy() {
85+
return autoFullRefreshPolicy;
86+
}
87+
6988
public TableSpecificConfig setExcludeColumns(Collection<String> excludeColumns) {
7089
this.excludeColumns = excludeColumns;
7190
return this;
@@ -157,7 +176,8 @@ public boolean equals(Object o) {
157176
if (this == o) return true;
158177
if (o == null || getClass() != o.getClass()) return false;
159178
TableSpecificConfig that = (TableSpecificConfig) o;
160-
return Objects.equals(excludeColumns, that.excludeColumns)
179+
return Objects.equals(autoFullRefreshPolicy, that.autoFullRefreshPolicy)
180+
&& Objects.equals(excludeColumns, that.excludeColumns)
161181
&& Objects.equals(includeColumns, that.includeColumns)
162182
&& Objects.equals(primaryKeys, that.primaryKeys)
163183
&& Objects.equals(queryBasedConnectorConfig, that.queryBasedConnectorConfig)
@@ -171,6 +191,7 @@ public boolean equals(Object o) {
171191
@Override
172192
public int hashCode() {
173193
return Objects.hash(
194+
autoFullRefreshPolicy,
174195
excludeColumns,
175196
includeColumns,
176197
primaryKeys,
@@ -185,6 +206,7 @@ public int hashCode() {
185206
@Override
186207
public String toString() {
187208
return new ToStringer(TableSpecificConfig.class)
209+
.add("autoFullRefreshPolicy", autoFullRefreshPolicy)
188210
.add("excludeColumns", excludeColumns)
189211
.add("includeColumns", includeColumns)
190212
.add("primaryKeys", primaryKeys)
Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.postgres;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Encapsulates various hostnames (r/w or r/o, pooled or not) for an endpoint. */
@Generated
public class EndpointHosts {
  /**
   * Hostname used to connect to this endpoint. For a read-write endpoint this is a read-write
   * hostname connecting to the primary compute; for a read-only endpoint this is a read-only
   * hostname permitting only read operations.
   */
  @JsonProperty("host")
  private String host;

  public String getHost() {
    return host;
  }

  public EndpointHosts setHost(String host) {
    this.host = host;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    EndpointHosts other = (EndpointHosts) o;
    return Objects.equals(host, other.host);
  }

  @Override
  public int hashCode() {
    return Objects.hash(host);
  }

  @Override
  public String toString() {
    return new ToStringer(EndpointHosts.class).add("host", host).toString();
  }
}

0 commit comments

Comments
 (0)