From 3afdfa8e3d1f087a887e7514607f876fea06ebb6 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 19 May 2025 12:10:22 +0000 Subject: [PATCH 01/31] Implement direct dataplane access --- .../sdk/core/oauth/DataPlaneTokenSource.java | 102 ++++++++++ .../sdk/core/oauth/EndpointTokenSource.java | 92 +++++++++ .../sdk/core/oauth/TokenEndpointClient.java | 91 +++++++++ .../core/oauth/DataPlaneTokenSourceTest.java | 180 +++++++++++++++++ .../core/oauth/EndpointTokenSourceTest.java | 191 ++++++++++++++++++ .../core/oauth/TokenEndpointClientTest.java | 171 ++++++++++++++++ 6 files changed, 827 insertions(+) create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/TokenEndpointClientTest.java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java new file mode 100644 index 000000000..b12a92dd2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java @@ -0,0 +1,102 @@ +package com.databricks.sdk.core.oauth; + +import com.databricks.sdk.core.http.HttpClient; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Manages and provides Databricks data plane tokens. 
This class is responsible for acquiring and + * caching OAuth tokens that are specific to a particular Databricks data plane service endpoint and + * a set of authorization details. It utilizes a {@link DatabricksOAuthTokenSource} for obtaining + * control plane tokens, which may then be exchanged or used to authorize requests for data plane + * tokens. Cached {@link EndpointTokenSource} instances are used to efficiently reuse tokens for + * repeated requests to the same endpoint with the same authorization context. + */ +public class DataPlaneTokenSource { + private final HttpClient httpClient; + private final DatabricksOAuthTokenSource cpTokenSource; + private final ConcurrentHashMap sourcesCache; + + /** + * Caching key for {@link EndpointTokenSource}, based on endpoint and authorization details. This + * is a value object that uniquely identifies a token source configuration. + */ + private static final class TokenSourceKey { + /** The target service endpoint URL. */ + private final String endpoint; + + /** Specific authorization details for the endpoint. */ + private final String authDetails; + + /** + * Constructs a TokenSourceKey. + * + * @param endpoint The target service endpoint URL. + * @param authDetails Specific authorization details. + */ + public TokenSourceKey(String endpoint, String authDetails) { + this.endpoint = endpoint; + this.authDetails = authDetails; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TokenSourceKey that = (TokenSourceKey) o; + return Objects.equals(endpoint, that.endpoint) + && Objects.equals(authDetails, that.authDetails); + } + + @Override + public int hashCode() { + return Objects.hash(endpoint, authDetails); + } + } + + /** + * Constructs a DataPlaneTokenSource. + * + * @param httpClient The {@link HttpClient} for token requests. 
+ * @param cpTokenSource The {@link DatabricksOAuthTokenSource} for control plane tokens. + * @throws NullPointerException if either parameter is null + */ + public DataPlaneTokenSource(HttpClient httpClient, DatabricksOAuthTokenSource cpTokenSource) { + this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); + this.cpTokenSource = + Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); + this.sourcesCache = new ConcurrentHashMap<>(); + } + + /** + * Retrieves a token for the specified endpoint and authorization details. It uses a cached {@link + * EndpointTokenSource} if available, otherwise creates and caches a new one. + * + * @param endpoint The target data plane service endpoint. + * @param authDetails Authorization details for the endpoint. + * @return The dataplane {@link Token}. + * @throws NullPointerException if either parameter is null + * @throws IllegalArgumentException if either parameter is empty + */ + public Token getToken(String endpoint, String authDetails) { + Objects.requireNonNull(endpoint, "Data plane endpoint URL cannot be null"); + Objects.requireNonNull(authDetails, "Authorization details cannot be null"); + if (endpoint.isEmpty()) { + throw new IllegalArgumentException("Data plane endpoint URL cannot be empty"); + } + if (authDetails.isEmpty()) { + throw new IllegalArgumentException("Authorization details cannot be empty"); + } + TokenSourceKey key = new TokenSourceKey(endpoint, authDetails); + + EndpointTokenSource specificSource = + sourcesCache.computeIfAbsent( + key, k -> new EndpointTokenSource(this.cpTokenSource, k.authDetails, this.httpClient)); + + return specificSource.getToken(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java new file mode 100644 index 000000000..c54e7f6c0 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java @@ -0,0 +1,92 @@ +package com.databricks.sdk.core.oauth; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.HttpClient; +import java.time.LocalDateTime; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Represents a token source that exchanges a control plane token for an endpoint-specific dataplane + * token. It utilizes an underlying {@link DatabricksOAuthTokenSource} to obtain the initial control + * plane token. + */ +public class EndpointTokenSource extends RefreshableTokenSource { + private static final Logger LOG = LoggerFactory.getLogger(EndpointTokenSource.class); + private static final String JWT_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"; + private static final String GRANT_TYPE_PARAM = "grant_type"; + private static final String AUTHORIZATION_DETAILS_PARAM = "authorization_details"; + private static final String ASSERTION_PARAM = "assertion"; + private static final String TOKEN_ENDPOINT = "/oidc/v1/token"; + + private final DatabricksOAuthTokenSource cpTokenSource; + private final String authDetails; + private final HttpClient httpClient; + + /** + * Constructs a new EndpointTokenSource. + * + * @param cpTokenSource The {@link DatabricksOAuthTokenSource} used to obtain the control plane + * token. + * @param authDetails The authorization details required for the token exchange. + * @param httpClient The {@link HttpClient} used to make the token exchange request. + * @throws IllegalArgumentException if authDetails is empty. + * @throws NullPointerException if any of the parameters are null. 
+ */ + public EndpointTokenSource( + DatabricksOAuthTokenSource cpTokenSource, String authDetails, HttpClient httpClient) { + this.cpTokenSource = + Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); + this.authDetails = Objects.requireNonNull(authDetails, "Authorization details cannot be null"); + if (authDetails.isEmpty()) { + throw new IllegalArgumentException("Authorization details cannot be empty"); + } + this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); + } + + /** + * Fetches an endpoint-specific dataplane token by exchanging a control plane token. + * + *

This method first obtains a control plane token from the configured {@code cpTokenSource}. + * It then uses this token as an assertion along with the provided {@code authDetails} to request + * a new, more scoped dataplane token from the Databricks OAuth token endpoint ({@value + * #TOKEN_ENDPOINT}). + * + * @return A new {@link Token} containing the exchanged dataplane access token, its type, any + * accompanying refresh token, and its expiry time. + * @throws DatabricksException if the token exchange with the OAuth endpoint fails. + * @throws IllegalArgumentException if the token endpoint url is empty. + * @throws NullPointerException if any of the parameters are null. + */ + @Override + protected Token refresh() { + Token cpToken = cpTokenSource.getToken(); + + Map params = new HashMap<>(); + params.put(GRANT_TYPE_PARAM, JWT_GRANT_TYPE); + params.put(AUTHORIZATION_DETAILS_PARAM, authDetails); + params.put(ASSERTION_PARAM, cpToken.getAccessToken()); + + OAuthResponse oauthResponse; + try { + oauthResponse = TokenEndpointClient.requestToken(this.httpClient, TOKEN_ENDPOINT, params); + } catch (DatabricksException | IllegalArgumentException | NullPointerException e) { + LOG.error( + "Failed to exchange control plane token for dataplane token at endpoint {}: {}", + TOKEN_ENDPOINT, + e.getMessage(), + e); + throw e; + } + + LocalDateTime expiry = LocalDateTime.now().plusSeconds(oauthResponse.getExpiresIn()); + return new Token( + oauthResponse.getAccessToken(), + oauthResponse.getTokenType(), + oauthResponse.getRefreshToken(), + expiry); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java new file mode 100644 index 000000000..69883dd24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java @@ -0,0 +1,91 @@ +package com.databricks.sdk.core.oauth; + +import 
com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.FormRequest; +import com.databricks.sdk.core.http.HttpClient; +import com.databricks.sdk.core.http.Response; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.util.Map; +import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Client for interacting with an OAuth token endpoint. + * + *

This class provides a method to request an OAuth token from a specified token endpoint URL + * using the provided HTTP client and request parameters. It handles the HTTP request and parses the + * JSON response into an {@link OAuthResponse} object. + */ +public final class TokenEndpointClient { + private static final Logger LOG = LoggerFactory.getLogger(TokenEndpointClient.class); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + private TokenEndpointClient() {} + + /** + * Requests an OAuth token from the specified token endpoint. + * + * @param httpClient The {@link HttpClient} to use for making the request. + * @param tokenEndpointUrl The URL of the token endpoint. + * @param params A map of parameters to include in the token request. + * @return An {@link OAuthResponse} containing the token information. + * @throws DatabricksException if an error occurs during the token request or response parsing. + * @throws IllegalArgumentException if the token endpoint URL is empty. + * @throws NullPointerException if any of the parameters are null. 
+ */ + public static OAuthResponse requestToken( + HttpClient httpClient, String tokenEndpointUrl, Map params) + throws DatabricksException { + Objects.requireNonNull(httpClient, "HttpClient cannot be null"); + Objects.requireNonNull(params, "Request parameters map cannot be null"); + Objects.requireNonNull(tokenEndpointUrl, "Token endpoint URL cannot be null"); + + if (tokenEndpointUrl.isEmpty()) { + throw new IllegalArgumentException("Token endpoint URL cannot be empty"); + } + + Response rawResponse; + try { + LOG.debug("Requesting token from endpoint: {}", tokenEndpointUrl); + rawResponse = httpClient.execute(new FormRequest(tokenEndpointUrl, params)); + } catch (IOException e) { + LOG.error("Failed to request token from {}: {}", tokenEndpointUrl, e.getMessage(), e); + throw new DatabricksException( + String.format("Failed to request token from %s: %s", tokenEndpointUrl, e.getMessage()), + e); + } + + OAuthResponse response; + try { + response = OBJECT_MAPPER.readValue(rawResponse.getBody(), OAuthResponse.class); + } catch (IOException e) { + LOG.error( + "Failed to parse OAuth response from token endpoint {}: {}", + tokenEndpointUrl, + e.getMessage(), + e); + throw new DatabricksException( + String.format( + "Failed to parse OAuth response from token endpoint %s: %s", + tokenEndpointUrl, e.getMessage()), + e); + } + + if (response.getErrorCode() != null) { + String errorSummary = + response.getErrorSummary() != null ? 
response.getErrorSummary() : "No summary provided."; + LOG.error( + "Token request to {} failed with error: {} - {}", + tokenEndpointUrl, + response.getErrorCode(), + errorSummary); + throw new DatabricksException( + String.format( + "Token request failed with error: %s - %s", response.getErrorCode(), errorSummary)); + } + LOG.debug("Successfully obtained token response from {}", tokenEndpointUrl); + return response; + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java new file mode 100644 index 000000000..91418798e --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java @@ -0,0 +1,180 @@ +package com.databricks.sdk.core.oauth; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +import com.databricks.sdk.core.http.HttpClient; +import com.databricks.sdk.core.http.Response; +import java.io.IOException; +import java.net.URL; +import java.time.LocalDateTime; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +public class DataPlaneTokenSourceTest { + private static final String TEST_ENDPOINT_1 = "https://endpoint1.databricks.com/"; + private static final String TEST_ENDPOINT_2 = "https://endpoint2.databricks.com/"; + private static final String TEST_AUTH_DETAILS_1 = "{\"aud\":\"aud1\"}"; + private static final String TEST_AUTH_DETAILS_2 = "{\"aud\":\"aud2\"}"; + private static final String TEST_CP_TOKEN = "cp-access-token"; + private static final String TEST_TOKEN_TYPE = "Bearer"; + private static final String TEST_REFRESH_TOKEN = "refresh-token"; + private static final int TEST_EXPIRES_IN = 3600; + + private static Stream 
provideDataPlaneTokenScenarios() throws Exception { + // Mock DatabricksOAuthTokenSource for control plane token + Token cpToken = + new Token(TEST_CP_TOKEN, TEST_TOKEN_TYPE, null, LocalDateTime.now().plusSeconds(600)); + DatabricksOAuthTokenSource mockCpTokenSource = mock(DatabricksOAuthTokenSource.class); + when(mockCpTokenSource.getToken()).thenReturn(cpToken); + + // --- Mock HttpClient for different scenarios --- + // Success JSON for endpoint1/auth1 + String successJson1 = + "{" + + "\"access_token\":\"dp-access-token1\"," + + "\"token_type\":\"Bearer\"," + + "\"refresh_token\":\"refresh-token\"," + + "\"expires_in\":3600" + + "}"; + HttpClient mockSuccessClient1 = mock(HttpClient.class); + when(mockSuccessClient1.execute(any())) + .thenReturn(new Response(successJson1, 200, "OK", new URL(TEST_ENDPOINT_1))); + + // Success JSON for endpoint2/auth2 + String successJson2 = + "{" + + "\"access_token\":\"dp-access-token2\"," + + "\"token_type\":\"Bearer\"," + + "\"refresh_token\":\"refresh-token\"," + + "\"expires_in\":3600" + + "}"; + HttpClient mockSuccessClient2 = mock(HttpClient.class); + when(mockSuccessClient2.execute(any())) + .thenReturn(new Response(successJson2, 200, "OK", new URL(TEST_ENDPOINT_2))); + + // Error response JSON + String errorJson = + "{" + "\"error\":\"invalid_request\"," + "\"error_description\":\"Bad request\"" + "}"; + HttpClient mockErrorClient = mock(HttpClient.class); + when(mockErrorClient.execute(any())) + .thenReturn(new Response(errorJson, 400, "Bad Request", new URL(TEST_ENDPOINT_1))); + + // IOException scenario + HttpClient mockIOExceptionClient = mock(HttpClient.class); + when(mockIOExceptionClient.execute(any())).thenThrow(new IOException("Network error")); + + // For null cpTokenSource + DatabricksOAuthTokenSource nullCpTokenSource = null; + + // For null httpClient + HttpClient nullHttpClient = null; + + // For null/empty endpoint or authDetails + return Stream.of( + Arguments.of( + "Success: endpoint1/auth1", + 
TEST_ENDPOINT_1, + TEST_AUTH_DETAILS_1, + mockSuccessClient1, + mockCpTokenSource, + new Token( + "dp-access-token1", + TEST_TOKEN_TYPE, + TEST_REFRESH_TOKEN, + LocalDateTime.now().plusSeconds(TEST_EXPIRES_IN)), + null // No exception + ), + Arguments.of( + "Success: endpoint2/auth2 (different cache key)", + TEST_ENDPOINT_2, + TEST_AUTH_DETAILS_2, + mockSuccessClient2, + mockCpTokenSource, + new Token( + "dp-access-token2", + TEST_TOKEN_TYPE, + TEST_REFRESH_TOKEN, + LocalDateTime.now().plusSeconds(TEST_EXPIRES_IN)), + null), + Arguments.of( + "Error response from endpoint", + TEST_ENDPOINT_1, + TEST_AUTH_DETAILS_1, + mockErrorClient, + mockCpTokenSource, + null, + com.databricks.sdk.core.DatabricksException.class), + Arguments.of( + "IOException from HttpClient", + TEST_ENDPOINT_1, + TEST_AUTH_DETAILS_1, + mockIOExceptionClient, + mockCpTokenSource, + null, + com.databricks.sdk.core.DatabricksException.class), + Arguments.of( + "Null cpTokenSource", + TEST_ENDPOINT_1, + TEST_AUTH_DETAILS_1, + mockSuccessClient1, + nullCpTokenSource, + null, + NullPointerException.class), + Arguments.of( + "Null httpClient", + TEST_ENDPOINT_1, + TEST_AUTH_DETAILS_1, + nullHttpClient, + mockCpTokenSource, + null, + NullPointerException.class), + Arguments.of( + "Null endpoint", + null, + TEST_AUTH_DETAILS_1, + mockSuccessClient1, + mockCpTokenSource, + null, + NullPointerException.class), + Arguments.of( + "Null authDetails", + TEST_ENDPOINT_1, + null, + mockSuccessClient1, + mockCpTokenSource, + null, + NullPointerException.class)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("provideDataPlaneTokenScenarios") + void testDataPlaneTokenSource( + String testName, + String endpoint, + String authDetails, + HttpClient httpClient, + DatabricksOAuthTokenSource cpTokenSource, + Token expectedToken, + Class expectedException) { + if (expectedException != null) { + assertThrows( + expectedException, + () -> { + DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, 
cpTokenSource); + source.getToken(endpoint, authDetails); + }); + } else { + DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource); + Token token = source.getToken(endpoint, authDetails); + assertNotNull(token); + assertEquals(expectedToken.getAccessToken(), token.getAccessToken()); + assertEquals(expectedToken.getTokenType(), token.getTokenType()); + assertEquals(expectedToken.getRefreshToken(), token.getRefreshToken()); + assertTrue(token.isValid()); + } + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java new file mode 100644 index 000000000..549077690 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java @@ -0,0 +1,191 @@ +package com.databricks.sdk.core.oauth; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.HttpClient; +import com.databricks.sdk.core.http.Response; +import java.io.IOException; +import java.net.URL; +import java.time.LocalDateTime; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class EndpointTokenSourceTest { + private static final String TEST_AUTH_DETAILS = "{\"aud\":\"test-audience\"}"; + private static final String TEST_CP_TOKEN = "cp-access-token"; + private static final String TEST_DP_TOKEN = "dp-access-token"; + private static final String TEST_TOKEN_TYPE = "Bearer"; + private static final String TEST_REFRESH_TOKEN = "refresh-token"; + private static final int TEST_EXPIRES_IN = 3600; + + private static Stream provideEndpointTokenScenarios() throws Exception { + // Success 
response JSON + String successJson = + "{" + + "\"access_token\":\"" + + TEST_DP_TOKEN + + "\"," + + "\"token_type\":\"" + + TEST_TOKEN_TYPE + + "\"," + + "\"expires_in\":" + + TEST_EXPIRES_IN + + "," + + "\"refresh_token\":\"" + + TEST_REFRESH_TOKEN + + "\"}"; + // Error response JSON + String errorJson = + "{" + + "\"error\":\"invalid_client\"," + + "\"error_description\":\"Client authentication failed\"}"; + // Malformed JSON + String malformedJson = "{not valid json}"; + + // Mock DatabricksOAuthTokenSource for control plane token + Token cpToken = new Token(TEST_CP_TOKEN, TEST_TOKEN_TYPE, LocalDateTime.now().plusMinutes(10)); + DatabricksOAuthTokenSource mockCpTokenSource = mock(DatabricksOAuthTokenSource.class); + when(mockCpTokenSource.getToken()).thenReturn(cpToken); + + // Mock HttpClient for success + HttpClient mockSuccessClient = mock(HttpClient.class); + when(mockSuccessClient.execute(any())) + .thenReturn(new Response(successJson, 200, "OK", new URL("https://test.databricks.com/"))); + + // Mock HttpClient for error response + HttpClient mockErrorClient = mock(HttpClient.class); + when(mockErrorClient.execute(any())) + .thenReturn( + new Response(errorJson, 400, "Bad Request", new URL("https://test.databricks.com/"))); + + // Mock HttpClient for malformed JSON + HttpClient mockMalformedClient = mock(HttpClient.class); + when(mockMalformedClient.execute(any())) + .thenReturn( + new Response(malformedJson, 200, "OK", new URL("https://test.databricks.com/"))); + + // Mock HttpClient for IOException + HttpClient mockIOExceptionClient = mock(HttpClient.class); + when(mockIOExceptionClient.execute(any())).thenThrow(new IOException("Network error")); + + return Stream.of( + Arguments.of( + "Success response", + mockCpTokenSource, + TEST_AUTH_DETAILS, + mockSuccessClient, + null, // No exception expected + TEST_DP_TOKEN, + TEST_TOKEN_TYPE, + TEST_REFRESH_TOKEN, + TEST_EXPIRES_IN), + Arguments.of( + "OAuth error response", + mockCpTokenSource, + 
TEST_AUTH_DETAILS, + mockErrorClient, + DatabricksException.class, + null, + null, + null, + 0), + Arguments.of( + "Malformed JSON response", + mockCpTokenSource, + TEST_AUTH_DETAILS, + mockMalformedClient, + DatabricksException.class, + null, + null, + null, + 0), + Arguments.of( + "IOException from HttpClient", + mockCpTokenSource, + TEST_AUTH_DETAILS, + mockIOExceptionClient, + DatabricksException.class, + null, + null, + null, + 0), + Arguments.of( + "Null cpTokenSource", + null, + TEST_AUTH_DETAILS, + mockSuccessClient, + NullPointerException.class, + null, + null, + null, + 0), + Arguments.of( + "Null authDetails", + mockCpTokenSource, + null, + mockSuccessClient, + NullPointerException.class, + null, + null, + null, + 0), + Arguments.of( + "Empty authDetails", + mockCpTokenSource, + "", + mockSuccessClient, + IllegalArgumentException.class, + null, + null, + null, + 0), + Arguments.of( + "Null httpClient", + mockCpTokenSource, + TEST_AUTH_DETAILS, + null, + NullPointerException.class, + null, + null, + null, + 0)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("provideEndpointTokenScenarios") + void testEndpointTokenSource( + String testName, + DatabricksOAuthTokenSource cpTokenSource, + String authDetails, + HttpClient httpClient, + Class expectedException, + String expectedAccessToken, + String expectedTokenType, + String expectedRefreshToken, + int expectedExpiresIn) { + if (expectedException != null) { + assertThrows( + expectedException, + () -> { + EndpointTokenSource source = + new EndpointTokenSource(cpTokenSource, authDetails, httpClient); + source.getToken(); + }); + } else { + EndpointTokenSource source = new EndpointTokenSource(cpTokenSource, authDetails, httpClient); + Token token = source.getToken(); + assertNotNull(token); + assertEquals(expectedAccessToken, token.getAccessToken()); + assertEquals(expectedTokenType, token.getTokenType()); + assertEquals(expectedRefreshToken, token.getRefreshToken()); + // Allow a few seconds of 
clock skew for expiry + assertTrue(token.isValid()); + assertTrue(token.getAccessToken().length() > 0); + } + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/TokenEndpointClientTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/TokenEndpointClientTest.java new file mode 100644 index 000000000..581c90143 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/TokenEndpointClientTest.java @@ -0,0 +1,171 @@ +package com.databricks.sdk.core.oauth; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.FormRequest; +import com.databricks.sdk.core.http.HttpClient; +import com.databricks.sdk.core.http.Response; +import java.io.IOException; +import java.net.URL; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class TokenEndpointClientTest { + private static final String TOKEN_ENDPOINT_URL = "https://test.databricks.com/oauth/token"; + private static final Map PARAMS = new HashMap<>(); + + private static Stream provideTokenScenarios() throws Exception { + // Success response JSON + String successJson = + "{" + + "\"access_token\":\"test-access-token\"," + + "\"token_type\":\"Bearer\"," + + "\"expires_in\":3600," + + "\"refresh_token\":\"test-refresh-token\"}"; + // Error response JSON + String errorJson = + "{" + + "\"error\":\"invalid_client\"," + + "\"error_description\":\"Client authentication failed\"}"; + // Malformed JSON + String malformedJson = "{not valid json}"; + + // Mock HttpClient for success + HttpClient mockSuccessClient = mock(HttpClient.class); + when(mockSuccessClient.execute(any(FormRequest.class))) 
+ .thenReturn(new Response(successJson, 200, "OK", new URL("https://test.databricks.com/"))); + + // Mock HttpClient for error response + HttpClient mockErrorClient = mock(HttpClient.class); + when(mockErrorClient.execute(any(FormRequest.class))) + .thenReturn( + new Response(errorJson, 400, "Bad Request", new URL("https://test.databricks.com/"))); + + // Mock HttpClient for malformed JSON + HttpClient mockMalformedClient = mock(HttpClient.class); + when(mockMalformedClient.execute(any(FormRequest.class))) + .thenReturn( + new Response(malformedJson, 200, "OK", new URL("https://test.databricks.com/"))); + + // Mock HttpClient for IOException + HttpClient mockIOExceptionClient = mock(HttpClient.class); + when(mockIOExceptionClient.execute(any(FormRequest.class))) + .thenThrow(new IOException("Network error")); + + return Stream.of( + Arguments.of( + "Success response", + mockSuccessClient, + TOKEN_ENDPOINT_URL, + PARAMS, + null, // No exception expected + "test-access-token", + "Bearer", + 3600, + "test-refresh-token"), + Arguments.of( + "OAuth error response", + mockErrorClient, + TOKEN_ENDPOINT_URL, + PARAMS, + DatabricksException.class, + null, + null, + 0, + null), + Arguments.of( + "Malformed JSON response", + mockMalformedClient, + TOKEN_ENDPOINT_URL, + PARAMS, + DatabricksException.class, + null, + null, + 0, + null), + Arguments.of( + "IOException from HttpClient", + mockIOExceptionClient, + TOKEN_ENDPOINT_URL, + PARAMS, + DatabricksException.class, + null, + null, + 0, + null), + Arguments.of( + "Null HttpClient", + null, + TOKEN_ENDPOINT_URL, + PARAMS, + NullPointerException.class, + null, + null, + 0, + null), + Arguments.of( + "Null tokenEndpointUrl", + mockSuccessClient, + null, + PARAMS, + NullPointerException.class, + null, + null, + 0, + null), + Arguments.of( + "Empty tokenEndpointUrl", + mockSuccessClient, + "", + PARAMS, + IllegalArgumentException.class, + null, + null, + 0, + null), + Arguments.of( + "Null params", + mockSuccessClient, + 
TOKEN_ENDPOINT_URL, + null, + NullPointerException.class, + null, + null, + 0, + null)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("provideTokenScenarios") + void testRequestToken( + String testName, + HttpClient httpClient, + String tokenEndpointUrl, + Map params, + Class expectedException, + String expectedAccessToken, + String expectedTokenType, + int expectedExpiresIn, + String expectedRefreshToken) { + if (expectedException != null) { + assertThrows( + expectedException, + () -> TokenEndpointClient.requestToken(httpClient, tokenEndpointUrl, params)); + } else { + OAuthResponse response = + TokenEndpointClient.requestToken(httpClient, tokenEndpointUrl, params); + assertNotNull(response); + assertEquals(expectedAccessToken, response.getAccessToken()); + assertEquals(expectedTokenType, response.getTokenType()); + assertEquals(expectedExpiresIn, response.getExpiresIn()); + assertEquals(expectedRefreshToken, response.getRefreshToken()); + } + } +} From 34f6f4871bfe8cfb07868dea2c2b264c9139ab5c Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 19 May 2025 12:12:13 +0000 Subject: [PATCH 02/31] Small refactor --- .../sdk/core/DefaultCredentialsProvider.java | 13 +- .../oauth/DatabricksOAuthTokenSource.java | 89 +--- .../oauth/DatabricksOAuthTokenSourceTest.java | 395 ++++++++---------- 3 files changed, 195 insertions(+), 302 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DefaultCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DefaultCredentialsProvider.java index 0e4723f36..f72aa435b 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DefaultCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DefaultCredentialsProvider.java @@ -34,14 +34,6 @@ public NamedIDTokenSource(String name, IDTokenSource idTokenSource) { this.name = name; this.idTokenSource = idTokenSource; } - - public String getName() { - return name; - } - - 
public IDTokenSource getIdTokenSource() { - return idTokenSource; - } } public DefaultCredentialsProvider() {} @@ -143,14 +135,13 @@ private void addOIDCCredentialsProviders(DatabricksConfig config) { config.getClientId(), config.getHost(), endpoints, - namedIdTokenSource.getIdTokenSource(), + namedIdTokenSource.idTokenSource, config.getHttpClient()) .audience(config.getTokenAudience()) .accountId(config.isAccountClient() ? config.getAccountId() : null) .build(); - providers.add( - new TokenSourceCredentialsProvider(oauthTokenSource, namedIdTokenSource.getName())); + providers.add(new TokenSourceCredentialsProvider(oauthTokenSource, namedIdTokenSource.name)); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DatabricksOAuthTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DatabricksOAuthTokenSource.java index e642159c0..f16ae2aed 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DatabricksOAuthTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DatabricksOAuthTokenSource.java @@ -1,15 +1,12 @@ package com.databricks.sdk.core.oauth; import com.databricks.sdk.core.DatabricksException; -import com.databricks.sdk.core.http.FormRequest; import com.databricks.sdk.core.http.HttpClient; -import com.databricks.sdk.core.http.Response; -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; -import java.io.IOException; import java.time.LocalDateTime; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,8 +41,6 @@ public class DatabricksOAuthTokenSource extends RefreshableTokenSource { private static final String SCOPE_PARAM = "scope"; private static final String CLIENT_ID_PARAM = "client_id"; - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - private DatabricksOAuthTokenSource(Builder builder) { this.clientId 
= builder.clientId; this.host = builder.host; @@ -123,44 +118,29 @@ public DatabricksOAuthTokenSource build() { } } - /** - * Validates that a value is non-null for required fields. If the value is a string, it also - * checks that it is non-empty. - * - * @param value The value to validate. - * @param fieldName The name of the field being validated. - * @throws IllegalArgumentException when the value is null or an empty string. - */ - private static void validate(Object value, String fieldName) { - if (value == null) { - LOG.error("Required parameter '{}' is null", fieldName); - throw new IllegalArgumentException( - String.format("Required parameter '%s' cannot be null", fieldName)); - } - if (value instanceof String && ((String) value).isEmpty()) { - LOG.error("Required parameter '{}' is empty", fieldName); - throw new IllegalArgumentException( - String.format("Required parameter '%s' cannot be empty", fieldName)); - } - } - /** * Retrieves an OAuth token by exchanging an ID token. Implements the OAuth token exchange flow to * obtain an access token. * * @return A Token containing the access token and related information. * @throws DatabricksException when the token exchange fails. - * @throws IllegalArgumentException when there is an error code in the response or when required - * parameters are missing. + * @throws IllegalArgumentException when the required string parameters are empty. + * @throws NullPointerException when any of the required parameters are null. 
*/ @Override public Token refresh() { - // Validate all required parameters - validate(clientId, "ClientID"); - validate(host, "Host"); - validate(endpoints, "Endpoints"); - validate(idTokenSource, "IDTokenSource"); - validate(httpClient, "HttpClient"); + Objects.requireNonNull(clientId, "ClientID cannot be null"); + Objects.requireNonNull(host, "Host cannot be null"); + Objects.requireNonNull(endpoints, "Endpoints cannot be null"); + Objects.requireNonNull(idTokenSource, "IDTokenSource cannot be null"); + Objects.requireNonNull(httpClient, "HttpClient cannot be null"); + + if (clientId.isEmpty()) { + throw new IllegalArgumentException("ClientID cannot be empty"); + } + if (host.isEmpty()) { + throw new IllegalArgumentException("Host cannot be empty"); + } String effectiveAudience = determineAudience(); IDToken idToken = idTokenSource.getIDToken(effectiveAudience); @@ -172,47 +152,20 @@ public Token refresh() { params.put(SCOPE_PARAM, SCOPE); params.put(CLIENT_ID_PARAM, clientId); - Response rawResponse; - try { - rawResponse = httpClient.execute(new FormRequest(endpoints.getTokenEndpoint(), params)); - } catch (IOException e) { - LOG.error( - "Failed to exchange ID token for access token at {}: {}", - endpoints.getTokenEndpoint(), - e.getMessage(), - e); - throw new DatabricksException( - String.format( - "Failed to exchange ID token for access token at %s: %s", - endpoints.getTokenEndpoint(), e.getMessage()), - e); - } - OAuthResponse response; try { - response = OBJECT_MAPPER.readValue(rawResponse.getBody(), OAuthResponse.class); - } catch (IOException e) { + response = + TokenEndpointClient.requestToken(this.httpClient, endpoints.getTokenEndpoint(), params); + } catch (DatabricksException e) { LOG.error( - "Failed to parse OAuth response from token endpoint {}: {}", + "OAuth token exchange failed for client ID '{}' at {}: {}", + this.clientId, endpoints.getTokenEndpoint(), e.getMessage(), e); - throw new DatabricksException( - String.format( - "Failed to parse 
OAuth response from token endpoint %s: %s", - endpoints.getTokenEndpoint(), e.getMessage())); + throw e; } - if (response.getErrorCode() != null) { - LOG.error( - "Token exchange failed with error: {} - {}", - response.getErrorCode(), - response.getErrorSummary()); - throw new IllegalArgumentException( - String.format( - "Token exchange failed with error: %s - %s", - response.getErrorCode(), response.getErrorSummary())); - } LocalDateTime expiry = LocalDateTime.now().plusSeconds(response.getExpiresIn()); return new Token( response.getAccessToken(), response.getTokenType(), response.getRefreshToken(), expiry); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DatabricksOAuthTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DatabricksOAuthTokenSourceTest.java index 8d7da8d3a..8217179f2 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DatabricksOAuthTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DatabricksOAuthTokenSourceTest.java @@ -15,7 +15,6 @@ import java.util.HashMap; import java.util.Map; import java.util.stream.Stream; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.mockito.Mockito; @@ -35,45 +34,42 @@ class DatabricksOAuthTokenSourceTest { private static final String TEST_AUDIENCE = "test-audience"; private static final String TEST_ACCOUNT_ID = "test-account-id"; - // Error message constants - private static final String ERROR_NULL = "Required parameter '%s' cannot be null"; - private static final String ERROR_EMPTY = "Required parameter '%s' cannot be empty"; - - private IDTokenSource mockIdTokenSource; - - @BeforeEach - void setUp() { - mockIdTokenSource = Mockito.mock(IDTokenSource.class); - IDToken idToken = new IDToken(TEST_ID_TOKEN); - when(mockIdTokenSource.getIDToken(any())).thenReturn(idToken); - } - /** * 
Test case data for parameterized token source tests. Each case defines a specific OAuth token * exchange scenario. */ private static class TestCase { final String name; // Descriptive name of the test case + final String clientId; // Client ID to use + final String host; // Host to use + final OpenIDConnectEndpoints endpoints; // OIDC endpoints + final IDTokenSource idTokenSource; // ID token source + final HttpClient httpClient; // HTTP client final String audience; // Custom audience value if provided final String accountId; // Account ID if provided final String expectedAudience; // Expected audience used in token exchange - final HttpClient mockHttpClient; // Pre-configured mock HTTP client final Class expectedException; // Expected exception type if any TestCase( String name, + String clientId, + String host, + OpenIDConnectEndpoints endpoints, + IDTokenSource idTokenSource, + HttpClient httpClient, String audience, String accountId, String expectedAudience, - int statusCode, - Object responseBody, - HttpClient mockHttpClient, Class expectedException) { this.name = name; + this.clientId = clientId; + this.host = host; + this.endpoints = endpoints; + this.idTokenSource = idTokenSource; + this.httpClient = httpClient; this.audience = audience; this.accountId = accountId; this.expectedAudience = expectedAudience; - this.mockHttpClient = mockHttpClient; this.expectedException = expectedException; } @@ -87,20 +83,27 @@ public String toString() { * Provides test cases for OAuth token exchange scenarios. Includes success cases with different * audience configurations and various error cases. 
*/ - private static Stream provideTestCases() { - try { - // Success response with valid token data - Map successResponse = new HashMap<>(); - successResponse.put("access_token", TOKEN); - successResponse.put("token_type", TOKEN_TYPE); - successResponse.put("refresh_token", REFRESH_TOKEN); - successResponse.put("expires_in", EXPIRES_IN); + private static Stream provideTestCases() throws MalformedURLException { + // Create valid components for reuse + OpenIDConnectEndpoints testEndpoints = + new OpenIDConnectEndpoints(TEST_TOKEN_ENDPOINT, TEST_AUTHORIZATION_ENDPOINT); + IDTokenSource testIdTokenSource = Mockito.mock(IDTokenSource.class); + IDToken idToken = new IDToken(TEST_ID_TOKEN); + when(testIdTokenSource.getIDToken(any())).thenReturn(idToken); + + // Create success response for token exchange tests + Map successResponse = new HashMap<>(); + successResponse.put("access_token", TOKEN); + successResponse.put("token_type", TOKEN_TYPE); + successResponse.put("refresh_token", REFRESH_TOKEN); + successResponse.put("expires_in", EXPIRES_IN); - // Error response for invalid requests - Map errorResponse = new HashMap<>(); - errorResponse.put("error", "invalid_request"); - errorResponse.put("error_description", "Invalid client ID"); + // Create error response for invalid requests + Map errorResponse = new HashMap<>(); + errorResponse.put("error", "invalid_request"); + errorResponse.put("error_description", "Invalid client ID"); + try { ObjectMapper mapper = new ObjectMapper(); final String errorJson = mapper.writeValueAsString(errorResponse); final String successJson = mapper.writeValueAsString(successResponse); @@ -115,71 +118,162 @@ private static Stream provideTestCases() { FormRequest expectedRequest = new FormRequest(TEST_TOKEN_ENDPOINT, formParams); return Stream.of( - // Success cases with different audience configurations + // Token exchange test cases new TestCase( "Default audience from token endpoint", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + 
testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), null, null, TEST_TOKEN_ENDPOINT, - 200, - successResponse, - createMockHttpClient(expectedRequest, 200, successJson), null), new TestCase( "Custom audience provided", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), TEST_AUDIENCE, null, TEST_AUDIENCE, - 200, - successResponse, - createMockHttpClient(expectedRequest, 200, successJson), null), new TestCase( "Custom audience takes precedence over account ID", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), TEST_AUDIENCE, TEST_ACCOUNT_ID, TEST_AUDIENCE, - 200, - successResponse, - createMockHttpClient(expectedRequest, 200, successJson), null), new TestCase( "Account ID used as audience when no custom audience", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), null, TEST_ACCOUNT_ID, TEST_ACCOUNT_ID, - 200, - successResponse, - createMockHttpClient(expectedRequest, 200, successJson), null), - // Error cases new TestCase( "Invalid request returns 400", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 400, errorJson), null, null, TEST_TOKEN_ENDPOINT, - 400, - errorJson, - createMockHttpClient(expectedRequest, 400, errorJson), - IllegalArgumentException.class), + DatabricksException.class), new TestCase( "Network error during token exchange", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + testIdTokenSource, + createMockHttpClientWithError(expectedRequest), null, null, TEST_TOKEN_ENDPOINT, - 0, - null, - createMockHttpClientWithError(expectedRequest), DatabricksException.class), new TestCase( "Invalid JSON response from server", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, "invalid 
json"), null, null, TEST_TOKEN_ENDPOINT, - 200, - "invalid json", - createMockHttpClient(expectedRequest, 200, "invalid json"), - DatabricksException.class)); + DatabricksException.class), + // Parameter validation test cases + new TestCase( + "Null client ID", + null, + TEST_HOST, + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), + null, + null, + null, + NullPointerException.class), + new TestCase( + "Empty client ID", + "", + TEST_HOST, + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), + null, + null, + null, + IllegalArgumentException.class), + new TestCase( + "Null host", + TEST_CLIENT_ID, + null, + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), + null, + null, + null, + NullPointerException.class), + new TestCase( + "Empty host", + TEST_CLIENT_ID, + "", + testEndpoints, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), + null, + null, + null, + IllegalArgumentException.class), + new TestCase( + "Null endpoints", + TEST_CLIENT_ID, + TEST_HOST, + null, + testIdTokenSource, + createMockHttpClient(expectedRequest, 200, successJson), + null, + null, + null, + NullPointerException.class), + new TestCase( + "Null IDTokenSource", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + null, + createMockHttpClient(expectedRequest, 200, successJson), + null, + null, + null, + NullPointerException.class), + new TestCase( + "Null HttpClient", + TEST_CLIENT_ID, + TEST_HOST, + testEndpoints, + testIdTokenSource, + null, + null, + null, + null, + NullPointerException.class)); } catch (IOException e) { throw new RuntimeException("Failed to create test cases", e); } @@ -212,179 +306,34 @@ private static HttpClient createMockHttpClientWithError(FormRequest expectedRequ * Tests OAuth token exchange with various configurations and error scenarios. 
Verifies correct * audience selection, token exchange, and error handling. */ - @ParameterizedTest(name = "testTokenSource: {arguments}") + @ParameterizedTest(name = "{0}") @MethodSource("provideTestCases") void testTokenSource(TestCase testCase) { - try { - // Create token source with test configuration - OpenIDConnectEndpoints endpoints = - new OpenIDConnectEndpoints(TEST_TOKEN_ENDPOINT, TEST_AUTHORIZATION_ENDPOINT); - - DatabricksOAuthTokenSource.Builder builder = - new DatabricksOAuthTokenSource.Builder( - TEST_CLIENT_ID, TEST_HOST, endpoints, mockIdTokenSource, testCase.mockHttpClient); - - builder.audience(testCase.audience).accountId(testCase.accountId); - - DatabricksOAuthTokenSource tokenSource = builder.build(); - - if (testCase.expectedException != null) { - assertThrows(testCase.expectedException, () -> tokenSource.getToken()); - } else { - // Verify successful token exchange - Token token = tokenSource.getToken(); - assertEquals(TOKEN, token.getAccessToken()); - assertEquals(TOKEN_TYPE, token.getTokenType()); - assertEquals(REFRESH_TOKEN, token.getRefreshToken()); - assertFalse(token.isExpired()); + DatabricksOAuthTokenSource.Builder builder = + new DatabricksOAuthTokenSource.Builder( + testCase.clientId, + testCase.host, + testCase.endpoints, + testCase.idTokenSource, + testCase.httpClient); - // Verify correct audience was used - verify(mockIdTokenSource).getIDToken(testCase.expectedAudience); - } - } catch (IOException e) { - throw new RuntimeException("Test failed", e); - } - } + builder.audience(testCase.audience); + builder.accountId(testCase.accountId); - /** - * Test case data for parameter validation tests. Each case defines a specific validation - * scenario. 
- */ - private static class ValidationTestCase { - final String name; - final String clientId; - final String host; - final OpenIDConnectEndpoints endpoints; - final IDTokenSource idTokenSource; - final HttpClient httpClient; - final String expectedFieldName; - final boolean isNullTest; + DatabricksOAuthTokenSource tokenSource = builder.build(); - ValidationTestCase( - String name, - String clientId, - String host, - OpenIDConnectEndpoints endpoints, - IDTokenSource idTokenSource, - HttpClient httpClient, - String expectedFieldName, - boolean isNullTest) { - this.name = name; - this.clientId = clientId; - this.host = host; - this.endpoints = endpoints; - this.idTokenSource = idTokenSource; - this.httpClient = httpClient; - this.expectedFieldName = expectedFieldName; - this.isNullTest = isNullTest; - } + if (testCase.expectedException != null) { + assertThrows(testCase.expectedException, () -> tokenSource.getToken()); + } else { + // Verify successful token exchange + Token token = tokenSource.getToken(); + assertEquals(TOKEN, token.getAccessToken()); + assertEquals(TOKEN_TYPE, token.getTokenType()); + assertEquals(REFRESH_TOKEN, token.getRefreshToken()); + assertFalse(token.isExpired()); - @Override - public String toString() { - return name; + // Verify correct audience was used + verify(testCase.idTokenSource, atLeastOnce()).getIDToken(testCase.expectedAudience); } } - - private static Stream provideValidationTestCases() - throws MalformedURLException { - OpenIDConnectEndpoints validEndpoints = - new OpenIDConnectEndpoints(TEST_TOKEN_ENDPOINT, TEST_AUTHORIZATION_ENDPOINT); - HttpClient validHttpClient = Mockito.mock(HttpClient.class); - IDTokenSource validIdTokenSource = Mockito.mock(IDTokenSource.class); - - return Stream.of( - // Client ID validation - new ValidationTestCase( - "Null client ID", - null, - TEST_HOST, - validEndpoints, - validIdTokenSource, - validHttpClient, - "ClientID", - true), - new ValidationTestCase( - "Empty client ID", - "", - TEST_HOST, 
- validEndpoints, - validIdTokenSource, - validHttpClient, - "ClientID", - false), - // Host validation - new ValidationTestCase( - "Null host", - TEST_CLIENT_ID, - null, - validEndpoints, - validIdTokenSource, - validHttpClient, - "Host", - true), - new ValidationTestCase( - "Empty host", - TEST_CLIENT_ID, - "", - validEndpoints, - validIdTokenSource, - validHttpClient, - "Host", - false), - // Endpoints validation - new ValidationTestCase( - "Null endpoints", - TEST_CLIENT_ID, - TEST_HOST, - null, - validIdTokenSource, - validHttpClient, - "Endpoints", - true), - // IDTokenSource validation - new ValidationTestCase( - "Null IDTokenSource", - TEST_CLIENT_ID, - TEST_HOST, - validEndpoints, - null, - validHttpClient, - "IDTokenSource", - true), - // HttpClient validation - new ValidationTestCase( - "Null HttpClient", - TEST_CLIENT_ID, - TEST_HOST, - validEndpoints, - validIdTokenSource, - null, - "HttpClient", - true)); - } - - /** - * Tests validation of required fields in the token source using parameterized test cases. - * Verifies that null or empty values for required fields cause getToken() to throw - * IllegalArgumentException with specific error messages. - */ - @ParameterizedTest(name = "testParameterValidation: {0}") - @MethodSource("provideValidationTestCases") - void testParameterValidation(ValidationTestCase testCase) { - DatabricksOAuthTokenSource tokenSource = - new DatabricksOAuthTokenSource.Builder( - testCase.clientId, - testCase.host, - testCase.endpoints, - testCase.idTokenSource, - testCase.httpClient) - .build(); - - IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, () -> tokenSource.getToken()); - - String expectedMessage = - String.format(testCase.isNullTest ? 
ERROR_NULL : ERROR_EMPTY, testCase.expectedFieldName); - assertEquals(expectedMessage, exception.getMessage()); - } } From afd1d5bb2aab323652c080eb72c5947e6849edf9 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Wed, 21 May 2025 13:00:57 +0000 Subject: [PATCH 03/31] override generated code temp --- .../com/databricks/sdk/WorkspaceClient.java | 16 ++++++++ .../com/databricks/sdk/core/ApiClient.java | 5 ++- .../databricks/sdk/core/DatabricksConfig.java | 28 ++++++++++++- .../sdk/core/oauth/DataPlaneTokenSource.java | 6 +-- .../sdk/core/oauth/EndpointTokenSource.java | 11 ++---- .../sdk/core/oauth/ErrorTokenSource.java | 20 ++++++++++ .../sdk/core/oauth/OAuthHeaderFactory.java | 25 ++++++++++++ .../oauth/TokenSourceCredentialsProvider.java | 10 +---- .../serving/ServingEndpointsDataPlaneAPI.java | 9 ++++- .../ServingEndpointsDataPlaneImpl.java | 39 +++++++++++++++++-- 10 files changed, 141 insertions(+), 28 deletions(-) create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 8e3a89c79..a6188308a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -5,6 +5,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.core.ConfigLoader; import com.databricks.sdk.core.DatabricksConfig; +import com.databricks.sdk.core.oauth.DataPlaneTokenSource; import com.databricks.sdk.mixin.ClustersExt; import com.databricks.sdk.mixin.DbfsExt; import com.databricks.sdk.mixin.SecretsExt; @@ -139,6 +140,7 @@ import com.databricks.sdk.service.pipelines.PipelinesAPI; import com.databricks.sdk.service.pipelines.PipelinesService; import 
com.databricks.sdk.service.serving.ServingEndpointsAPI; +import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneAPI; import com.databricks.sdk.service.serving.ServingEndpointsService; import com.databricks.sdk.service.settings.CredentialsManagerAPI; import com.databricks.sdk.service.settings.CredentialsManagerService; @@ -291,6 +293,7 @@ public class WorkspaceClient { private SecretsExt secretsAPI; private ServicePrincipalsAPI servicePrincipalsAPI; private ServingEndpointsAPI servingEndpointsAPI; + private ServingEndpointsDataPlaneAPI servingEndpointsDataPlaneAPI; private SettingsAPI settingsAPI; private SharesAPI sharesAPI; private StatementExecutionAPI statementExecutionAPI; @@ -399,6 +402,11 @@ public WorkspaceClient(DatabricksConfig config) { secretsAPI = new SecretsExt(apiClient); servicePrincipalsAPI = new ServicePrincipalsAPI(apiClient); servingEndpointsAPI = new ServingEndpointsAPI(apiClient); + servingEndpointsDataPlaneAPI = + new ServingEndpointsDataPlaneAPI( + apiClient, + servingEndpointsAPI, + new DataPlaneTokenSource(apiClient.getHttpClient(), config.getTokenSource())); settingsAPI = new SettingsAPI(apiClient); sharesAPI = new SharesAPI(apiClient); statementExecutionAPI = new StatementExecutionAPI(apiClient); @@ -1443,6 +1451,14 @@ public ServingEndpointsAPI servingEndpoints() { return servingEndpointsAPI; } + /** + * The Serving Endpoints Data Plane API allows you to create, update, and delete model serving + * endpoints. + */ + public ServingEndpointsDataPlaneAPI servingEndpointsDataPlane() { + return servingEndpointsDataPlaneAPI; + } + /** Workspace Settings API allows users to manage settings at the workspace level. 
*/ public SettingsAPI settings() { return settingsAPI; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index a45590b4d..802785d6c 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -50,7 +50,6 @@ public Builder withDatabricksConfig(DatabricksConfig config) { this.accountId = config.getAccountId(); this.retryStrategyPicker = new RequestBasedRetryStrategyPicker(config.getHost()); this.isDebugHeaders = config.isDebugHeaders(); - return this; } @@ -434,4 +433,8 @@ public String serialize(Object body) throws JsonProcessingException { } return mapper.writeValueAsString(body); } + + public HttpClient getHttpClient() { + return httpClient; + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index 98d75d4bc..af210c07c 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -4,7 +4,10 @@ import com.databricks.sdk.core.http.HttpClient; import com.databricks.sdk.core.http.Request; import com.databricks.sdk.core.http.Response; +import com.databricks.sdk.core.oauth.ErrorTokenSource; +import com.databricks.sdk.core.oauth.OAuthHeaderFactory; import com.databricks.sdk.core.oauth.OpenIDConnectEndpoints; +import com.databricks.sdk.core.oauth.TokenSource; import com.databricks.sdk.core.utils.Cloud; import com.databricks.sdk.core.utils.Environment; import com.fasterxml.jackson.databind.ObjectMapper; @@ -209,6 +212,23 @@ public synchronized Map authenticate() throws DatabricksExceptio } } + public TokenSource getTokenSource() { + try { + if (headerFactory == null) { + ConfigLoader.fixHostIfNeeded(this); + 
headerFactory = credentialsProvider.configure(this); + setAuthType(credentialsProvider.authType()); + } + if (headerFactory instanceof OAuthHeaderFactory) { + return (TokenSource) headerFactory; + } + return new ErrorTokenSource( + String.format("OAuth Token not supported for current auth type %s", authType)); + } catch (Exception e) { + return new ErrorTokenSource("Failed to get token source: " + e.getMessage()); + } + } + public CredentialsProvider getCredentialsProvider() { return this.credentialsProvider; } @@ -389,13 +409,17 @@ public DatabricksConfig setAzureUseMsi(boolean azureUseMsi) { return this; } - /** @deprecated Use {@link #getAzureUseMsi()} instead. */ + /** + * @deprecated Use {@link #getAzureUseMsi()} instead. + */ @Deprecated() public boolean getAzureUseMSI() { return azureUseMsi; } - /** @deprecated Use {@link #setAzureUseMsi(boolean)} instead. */ + /** + * @deprecated Use {@link #setAzureUseMsi(boolean)} instead. + */ @Deprecated public DatabricksConfig setAzureUseMSI(boolean azureUseMsi) { this.azureUseMsi = azureUseMsi; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java index b12a92dd2..d106019ce 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java @@ -14,7 +14,7 @@ */ public class DataPlaneTokenSource { private final HttpClient httpClient; - private final DatabricksOAuthTokenSource cpTokenSource; + private final TokenSource cpTokenSource; private final ConcurrentHashMap sourcesCache; /** @@ -62,10 +62,10 @@ public int hashCode() { * Constructs a DataPlaneTokenSource. * * @param httpClient The {@link HttpClient} for token requests. - * @param cpTokenSource The {@link DatabricksOAuthTokenSource} for control plane tokens. 
+ * @param cpTokenSource The {@link TokenSource} for control plane tokens. * @throws NullPointerException if either parameter is null */ - public DataPlaneTokenSource(HttpClient httpClient, DatabricksOAuthTokenSource cpTokenSource) { + public DataPlaneTokenSource(HttpClient httpClient, TokenSource cpTokenSource) { this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); this.cpTokenSource = Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java index c54e7f6c0..05f3c7af1 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java @@ -11,8 +11,7 @@ /** * Represents a token source that exchanges a control plane token for an endpoint-specific dataplane - * token. It utilizes an underlying {@link DatabricksOAuthTokenSource} to obtain the initial control - * plane token. + * token. It utilizes an underlying {@link TokenSource} to obtain the initial control plane token. */ public class EndpointTokenSource extends RefreshableTokenSource { private static final Logger LOG = LoggerFactory.getLogger(EndpointTokenSource.class); @@ -22,22 +21,20 @@ public class EndpointTokenSource extends RefreshableTokenSource { private static final String ASSERTION_PARAM = "assertion"; private static final String TOKEN_ENDPOINT = "/oidc/v1/token"; - private final DatabricksOAuthTokenSource cpTokenSource; + private final TokenSource cpTokenSource; private final String authDetails; private final HttpClient httpClient; /** * Constructs a new EndpointTokenSource. * - * @param cpTokenSource The {@link DatabricksOAuthTokenSource} used to obtain the control plane - * token. 
+ * @param cpTokenSource The {@link TokenSource} used to obtain the control plane token. * @param authDetails The authorization details required for the token exchange. * @param httpClient The {@link HttpClient} used to make the token exchange request. * @throws IllegalArgumentException if authDetails is empty. * @throws NullPointerException if any of the parameters are null. */ - public EndpointTokenSource( - DatabricksOAuthTokenSource cpTokenSource, String authDetails, HttpClient httpClient) { + public EndpointTokenSource(TokenSource cpTokenSource, String authDetails, HttpClient httpClient) { this.cpTokenSource = Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); this.authDetails = Objects.requireNonNull(authDetails, "Authorization details cannot be null"); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java new file mode 100644 index 000000000..9fe4d79d6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java @@ -0,0 +1,20 @@ +package com.databricks.sdk.core.oauth; + +import com.databricks.sdk.core.DatabricksException; + +/** + * A TokenSource implementation that always throws an error when attempting to get a token. This is + * used when the header factory is not an OAuthHeaderFactory. 
+ */ +public class ErrorTokenSource implements TokenSource { + private final String errorMessage; + + public ErrorTokenSource(String errorMessage) { + this.errorMessage = errorMessage; + } + + @Override + public Token getToken() { + throw new DatabricksException(errorMessage); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java new file mode 100644 index 000000000..66d7394d4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java @@ -0,0 +1,25 @@ +package com.databricks.sdk.core.oauth; + +import com.databricks.sdk.core.HeaderFactory; +import java.util.HashMap; +import java.util.Map; + +public class OAuthHeaderFactory implements HeaderFactory, TokenSource { + private final TokenSource tokenSource; + + public OAuthHeaderFactory(TokenSource tokenSource) { + this.tokenSource = tokenSource; + } + + @Override + public Token getToken() { + return tokenSource.getToken(); + } + + @Override + public Map headers() { + Map headers = new HashMap<>(); + headers.put("Authorization", "Bearer " + tokenSource.getToken().getAccessToken()); + return headers; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenSourceCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenSourceCredentialsProvider.java index 5b098d076..ca17a2884 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenSourceCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenSourceCredentialsProvider.java @@ -3,8 +3,6 @@ import com.databricks.sdk.core.CredentialsProvider; import com.databricks.sdk.core.DatabricksConfig; import com.databricks.sdk.core.HeaderFactory; -import java.util.HashMap; -import java.util.Map; /** * A credentials provider that uses a TokenSource to obtain 
and manage authentication tokens. This @@ -44,13 +42,7 @@ public HeaderFactory configure(DatabricksConfig config) { // Validate that we can get a token before returning a HeaderFactory tokenSource.getToken().getAccessToken(); - return () -> { - Map headers = new HashMap<>(); - // Some TokenSource implementations cache tokens internally, so an additional getToken() - // call is not costly - headers.put("Authorization", "Bearer " + tokenSource.getToken().getAccessToken()); - return headers; - }; + return new OAuthHeaderFactory(tokenSource); } catch (Exception e) { return null; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java index 05aef2cb6..a1e7f1214 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java @@ -2,6 +2,7 @@ package com.databricks.sdk.service.serving; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.oauth.DataPlaneTokenSource; import com.databricks.sdk.support.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -17,8 +18,12 @@ public class ServingEndpointsDataPlaneAPI { private final ServingEndpointsDataPlaneService impl; /** Regular-use constructor */ - public ServingEndpointsDataPlaneAPI(ApiClient apiClient) { - impl = new ServingEndpointsDataPlaneImpl(apiClient); + public ServingEndpointsDataPlaneAPI( + ApiClient apiClient, + ServingEndpointsAPI servingEndpointsAPI, + DataPlaneTokenSource dataPlaneTokenSource) { + + impl = new ServingEndpointsDataPlaneImpl(apiClient, servingEndpointsAPI, dataPlaneTokenSource); } /** Constructor for mocks */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index 2dabe61d2..357b2d506 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -4,26 +4,57 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.core.DatabricksException; import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.core.oauth.DataPlaneTokenSource; +import com.databricks.sdk.core.oauth.Token; import com.databricks.sdk.support.Generated; import java.io.IOException; +import java.util.concurrent.ConcurrentHashMap; /** Package-local implementation of ServingEndpointsDataPlane */ @Generated class ServingEndpointsDataPlaneImpl implements ServingEndpointsDataPlaneService { private final ApiClient apiClient; + private final ServingEndpointsAPI controlPlane; + private final DataPlaneTokenSource dataPlaneTokenSource; + private final ConcurrentHashMap infos; - public ServingEndpointsDataPlaneImpl(ApiClient apiClient) { + public ServingEndpointsDataPlaneImpl( + ApiClient apiClient, + ServingEndpointsAPI controlPlane, + DataPlaneTokenSource dataPlaneTokenSource) { this.apiClient = apiClient; + this.controlPlane = controlPlane; + this.dataPlaneTokenSource = dataPlaneTokenSource; + this.infos = new ConcurrentHashMap<>(); + } + + private DataPlaneInfo dataPlaneInfoQuery(QueryEndpointInput request) { + String key = String.format("Query/%s", request.getName()); + + return infos.computeIfAbsent( + key, + k -> { + ServingEndpointDetailed response = + controlPlane.get(new GetServingEndpointRequest().setName(request.getName())); + return response.getDataPlaneInfo().getQueryInfo(); + }); } @Override public QueryEndpointResponse query(QueryEndpointInput request) { - String path = String.format("/serving-endpoints/%s/invocations", request.getName()); + DataPlaneInfo 
dataPlaneInfo = dataPlaneInfoQuery(request); + + Token token = + dataPlaneTokenSource.getToken( + dataPlaneInfo.getEndpointUrl(), dataPlaneInfo.getAuthorizationDetails()); + try { - Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); + Request req = + new Request("POST", dataPlaneInfo.getEndpointUrl(), apiClient.serialize(request)); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); + req.withHeader("Authorization", "Bearer " + token.getAccessToken()); + return apiClient.execute(req, QueryEndpointResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); From 4a8832bdee3bc6854fae80d1796e4cef5af21769 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Thu, 22 May 2025 13:06:43 +0000 Subject: [PATCH 04/31] Modify all credentials provider which are OAuth --- .../sdk/core/AzureCliCredentialsProvider.java | 23 ++++++++------ .../DatabricksCliCredentialsProvider.java | 11 ++----- .../AzureGithubOidcCredentialsProvider.java | 10 ++---- ...reServicePrincipalCredentialsProvider.java | 18 ++++++----- .../ExternalBrowserCredentialsProvider.java | 3 +- .../sdk/core/oauth/OAuthHeaderFactory.java | 31 +++++++++---------- .../OAuthHeaderFactoryFromSuppliers.java | 29 +++++++++++++++++ .../OAuthHeaderFactoryFromTokenSource.java | 25 +++++++++++++++ ...2MServicePrincipalCredentialsProvider.java | 13 ++------ .../sdk/core/oauth/SessionCredentials.java | 11 ++----- .../oauth/TokenSourceCredentialsProvider.java | 5 ++- ...zureGithubOidcCredentialsProviderTest.java | 2 +- 12 files changed, 105 insertions(+), 76 deletions(-) create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/AzureCliCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/AzureCliCredentialsProvider.java index b08dfb8b4..6ed5b83d3 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/AzureCliCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/AzureCliCredentialsProvider.java @@ -1,5 +1,6 @@ package com.databricks.sdk.core; +import com.databricks.sdk.core.oauth.OAuthHeaderFactory; import com.databricks.sdk.core.oauth.Token; import com.databricks.sdk.core.utils.AzureUtils; import com.fasterxml.jackson.databind.ObjectMapper; @@ -68,7 +69,7 @@ private Optional getSubscription(DatabricksConfig config) { } @Override - public HeaderFactory configure(DatabricksConfig config) { + public OAuthHeaderFactory configure(DatabricksConfig config) { if (!config.isAzure()) { return null; } @@ -86,15 +87,17 @@ public HeaderFactory configure(DatabricksConfig config) { mgmtTokenSource = null; } CliTokenSource finalMgmtTokenSource = mgmtTokenSource; - return () -> { - Token token = tokenSource.getToken(); - Map headers = new HashMap<>(); - headers.put("Authorization", token.getTokenType() + " " + token.getAccessToken()); - if (finalMgmtTokenSource != null) { - AzureUtils.addSpManagementToken(finalMgmtTokenSource, headers); - } - return AzureUtils.addWorkspaceResourceId(config, headers); - }; + return OAuthHeaderFactory.fromSuppliers( + tokenSource::getToken, + () -> { + Token token = tokenSource.getToken(); + Map headers = new HashMap<>(); + headers.put("Authorization", token.getTokenType() + " " + token.getAccessToken()); + if (finalMgmtTokenSource != null) { + AzureUtils.addSpManagementToken(finalMgmtTokenSource, headers); + } + return AzureUtils.addWorkspaceResourceId(config, headers); + }); } catch (DatabricksException e) { String stderr = e.getMessage(); if (stderr.contains("not found")) { diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliCredentialsProvider.java index c20ac2891..655d0b599 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksCliCredentialsProvider.java @@ -1,6 +1,6 @@ package com.databricks.sdk.core; -import com.databricks.sdk.core.oauth.Token; +import com.databricks.sdk.core.oauth.OAuthHeaderFactory; import com.databricks.sdk.core.utils.OSUtils; import java.util.*; import org.slf4j.Logger; @@ -36,7 +36,7 @@ private CliTokenSource getDatabricksCliTokenSource(DatabricksConfig config) { } @Override - public HeaderFactory configure(DatabricksConfig config) { + public OAuthHeaderFactory configure(DatabricksConfig config) { String host = config.getHost(); if (host == null) { return null; @@ -48,12 +48,7 @@ public HeaderFactory configure(DatabricksConfig config) { return null; } tokenSource.getToken(); // We need this for checking if databricks CLI is installed. 
- return () -> { - Token token = tokenSource.getToken(); - Map headers = new HashMap<>(); - headers.put("Authorization", token.getTokenType() + " " + token.getAccessToken()); - return headers; - }; + return OAuthHeaderFactory.fromTokenSource(tokenSource); } catch (DatabricksException e) { String stderr = e.getMessage(); if (stderr.contains("not found")) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/AzureGithubOidcCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/AzureGithubOidcCredentialsProvider.java index 316667114..b29b5aa0e 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/AzureGithubOidcCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/AzureGithubOidcCredentialsProvider.java @@ -6,8 +6,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; import java.util.Optional; /** @@ -25,7 +23,7 @@ public String authType() { } @Override - public HeaderFactory configure(DatabricksConfig config) { + public OAuthHeaderFactory configure(DatabricksConfig config) { if (!config.isAzure() || config.getAzureClientId() == null || config.getAzureTenantId() == null @@ -49,11 +47,7 @@ public HeaderFactory configure(DatabricksConfig config) { idToken.get(), "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"); - return () -> { - Map headers = new HashMap<>(); - headers.put("Authorization", "Bearer " + tokenSource.getToken().getAccessToken()); - return headers; - }; + return OAuthHeaderFactory.fromTokenSource(tokenSource); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/AzureServicePrincipalCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/AzureServicePrincipalCredentialsProvider.java index 432046777..c7c7bb672 100644 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/AzureServicePrincipalCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/AzureServicePrincipalCredentialsProvider.java @@ -19,7 +19,7 @@ public String authType() { } @Override - public HeaderFactory configure(DatabricksConfig config) { + public OAuthHeaderFactory configure(DatabricksConfig config) { if (!config.isAzure() || config.getAzureClientId() == null || config.getAzureClientSecret() == null @@ -32,13 +32,15 @@ public HeaderFactory configure(DatabricksConfig config) { RefreshableTokenSource cloud = tokenSourceFor(config, config.getAzureEnvironment().getServiceManagementEndpoint()); - return () -> { - Map headers = new HashMap<>(); - headers.put("Authorization", "Bearer " + inner.getToken().getAccessToken()); - AzureUtils.addWorkspaceResourceId(config, headers); - AzureUtils.addSpManagementToken(cloud, headers); - return headers; - }; + return OAuthHeaderFactory.fromSuppliers( + inner::getToken, + () -> { + Map headers = new HashMap<>(); + headers.put("Authorization", "Bearer " + inner.getToken().getAccessToken()); + AzureUtils.addWorkspaceResourceId(config, headers); + AzureUtils.addSpManagementToken(cloud, headers); + return headers; + }); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ExternalBrowserCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ExternalBrowserCredentialsProvider.java index b8aa4c66f..7bae60022 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ExternalBrowserCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ExternalBrowserCredentialsProvider.java @@ -3,7 +3,6 @@ import com.databricks.sdk.core.CredentialsProvider; import com.databricks.sdk.core.DatabricksConfig; import com.databricks.sdk.core.DatabricksException; -import com.databricks.sdk.core.HeaderFactory; import 
java.io.IOException; import java.nio.file.Path; import java.util.Objects; @@ -44,7 +43,7 @@ public String authType() { } @Override - public HeaderFactory configure(DatabricksConfig config) { + public OAuthHeaderFactory configure(DatabricksConfig config) { if (config.getHost() == null || !Objects.equals(config.getAuthType(), "external-browser")) { return null; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java index 66d7394d4..2d8a64eb2 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java @@ -1,25 +1,24 @@ package com.databricks.sdk.core.oauth; import com.databricks.sdk.core.HeaderFactory; -import java.util.HashMap; import java.util.Map; +import java.util.function.Supplier; -public class OAuthHeaderFactory implements HeaderFactory, TokenSource { - private final TokenSource tokenSource; - - public OAuthHeaderFactory(TokenSource tokenSource) { - this.tokenSource = tokenSource; - } - - @Override - public Token getToken() { - return tokenSource.getToken(); +public interface OAuthHeaderFactory extends HeaderFactory, TokenSource { + /** + * Creates an OAuthHeaderFactory from separate token and header suppliers. This allows for custom + * header generation beyond just the Authorization header. + */ + static OAuthHeaderFactory fromSuppliers( + Supplier tokenSupplier, Supplier> headerSupplier) { + return new OAuthHeaderFactoryFromSuppliers(tokenSupplier, headerSupplier); } - @Override - public Map headers() { - Map headers = new HashMap<>(); - headers.put("Authorization", "Bearer " + tokenSource.getToken().getAccessToken()); - return headers; + /** + * Creates an OAuthHeaderFactory from a TokenSource. This is a convenience method for the common + * case where headers are derived from the token. 
+ */ + static OAuthHeaderFactory fromTokenSource(TokenSource tokenSource) { + return new OAuthHeaderFactoryFromTokenSource(tokenSource); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java new file mode 100644 index 000000000..46c70ec77 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java @@ -0,0 +1,29 @@ +package com.databricks.sdk.core.oauth; + +import java.util.Map; +import java.util.function.Supplier; + +/** + * A concrete implementation of OAuthHeaderFactory that uses suppliers for both token and header + * generation. This allows for custom header generation beyond just the Authorization header. + */ +public class OAuthHeaderFactoryFromSuppliers implements OAuthHeaderFactory { + private final Supplier tokenSupplier; + private final Supplier> headerSupplier; + + public OAuthHeaderFactoryFromSuppliers( + Supplier tokenSupplier, Supplier> headerSupplier) { + this.tokenSupplier = tokenSupplier; + this.headerSupplier = headerSupplier; + } + + @Override + public Map headers() { + return headerSupplier.get(); + } + + @Override + public Token getToken() { + return tokenSupplier.get(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java new file mode 100644 index 000000000..7a9416edf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java @@ -0,0 +1,25 @@ +package com.databricks.sdk.core.oauth; + +import java.util.HashMap; +import java.util.Map; + +public class OAuthHeaderFactoryFromTokenSource implements OAuthHeaderFactory { + private final TokenSource tokenSource; + + public 
OAuthHeaderFactoryFromTokenSource(TokenSource tokenSource) { + this.tokenSource = tokenSource; + } + + @Override + public Token getToken() { + return tokenSource.getToken(); + } + + @Override + public Map headers() { + Token token = tokenSource.getToken(); + Map headers = new HashMap<>(); + headers.put("Authorization", token.getTokenType() + " " + token.getAccessToken()); + return headers; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthM2MServicePrincipalCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthM2MServicePrincipalCredentialsProvider.java index 9b389cb34..058fc268c 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthM2MServicePrincipalCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthM2MServicePrincipalCredentialsProvider.java @@ -1,18 +1,14 @@ package com.databricks.sdk.core.oauth; import com.databricks.sdk.core.*; -import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import java.util.Collections; -import java.util.HashMap; -import java.util.Map; /** * Adds refreshed Databricks machine-to-machine OAuth Bearer token to every request, if * /oidc/.well-known/oauth-authorization-server is available on the given host. 
*/ public class OAuthM2MServicePrincipalCredentialsProvider implements CredentialsProvider { - private final ObjectMapper mapper = new ObjectMapper(); @Override public String authType() { @@ -20,7 +16,7 @@ public String authType() { } @Override - public HeaderFactory configure(DatabricksConfig config) { + public OAuthHeaderFactory configure(DatabricksConfig config) { if (config.getClientId() == null || config.getClientSecret() == null || config.getHost() == null) { @@ -41,12 +37,7 @@ public HeaderFactory configure(DatabricksConfig config) { .withAuthParameterPosition(AuthParameterPosition.HEADER) .build(); - return () -> { - Token token = tokenSource.getToken(); - Map headers = new HashMap<>(); - headers.put("Authorization", token.getTokenType() + " " + token.getAccessToken()); - return headers; - }; + return OAuthHeaderFactory.fromTokenSource(tokenSource); } catch (IOException e) { // TODO: Log exception throw new DatabricksException("Unable to fetch OIDC endpoint: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/SessionCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/SessionCredentials.java index 9114b6d6c..4d2d512e3 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/SessionCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/SessionCredentials.java @@ -3,12 +3,10 @@ import com.databricks.sdk.core.CredentialsProvider; import com.databricks.sdk.core.DatabricksConfig; import com.databricks.sdk.core.DatabricksException; -import com.databricks.sdk.core.HeaderFactory; import com.databricks.sdk.core.http.HttpClient; import java.io.Serializable; import java.util.HashMap; import java.util.Map; -import org.apache.http.HttpHeaders; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,13 +28,8 @@ public String authType() { } @Override - public HeaderFactory configure(DatabricksConfig config) { - return () -> { - Map 
headers = new HashMap<>(); - headers.put( - HttpHeaders.AUTHORIZATION, getToken().getTokenType() + " " + getToken().getAccessToken()); - return headers; - }; + public OAuthHeaderFactory configure(DatabricksConfig config) { + return OAuthHeaderFactory.fromTokenSource(this); } static class Builder { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenSourceCredentialsProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenSourceCredentialsProvider.java index ca17a2884..9a341b901 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenSourceCredentialsProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenSourceCredentialsProvider.java @@ -2,7 +2,6 @@ import com.databricks.sdk.core.CredentialsProvider; import com.databricks.sdk.core.DatabricksConfig; -import com.databricks.sdk.core.HeaderFactory; /** * A credentials provider that uses a TokenSource to obtain and manage authentication tokens. This @@ -37,12 +36,12 @@ public TokenSourceCredentialsProvider(TokenSource tokenSource, String authType) * acquisition fails. 
*/ @Override - public HeaderFactory configure(DatabricksConfig config) { + public OAuthHeaderFactory configure(DatabricksConfig config) { try { // Validate that we can get a token before returning a HeaderFactory tokenSource.getToken().getAccessToken(); - return new OAuthHeaderFactory(tokenSource); + return OAuthHeaderFactory.fromTokenSource(tokenSource); } catch (Exception e) { return null; } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/AzureGithubOidcCredentialsProviderTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/AzureGithubOidcCredentialsProviderTest.java index 10b9c0ecc..f67beceb4 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/AzureGithubOidcCredentialsProviderTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/AzureGithubOidcCredentialsProviderTest.java @@ -25,7 +25,7 @@ public class AzureGithubOidcCredentialsProviderTest { private static final String OAUTH_RESPONSE = new JSONObject() .put("access_token", TOKEN) - .put("token_type", "token-type") + .put("token_type", "Bearer") .put("expires_in", 360) .toString(); From 419121e47d2c6013f9748f31299abd2403103400 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Thu, 22 May 2025 18:13:32 +0000 Subject: [PATCH 05/31] Finally working :) --- .../com/databricks/sdk/WorkspaceClient.java | 40 ++++++++++++------- .../com/databricks/sdk/core/ApiClient.java | 17 +++++--- .../sdk/core/oauth/DataPlaneTokenSource.java | 9 +++-- .../sdk/core/oauth/EndpointTokenSource.java | 17 ++++++-- .../sdk/core/oauth/TokenEndpointClient.java | 19 ++++++++- .../ServingEndpointsDataPlaneImpl.java | 2 + .../core/oauth/DataPlaneTokenSourceTest.java | 18 ++++++++- .../core/oauth/EndpointTokenSourceTest.java | 18 ++++++++- 8 files changed, 108 insertions(+), 32 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index a6188308a..d997b344d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -5,6 +5,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.core.ConfigLoader; import com.databricks.sdk.core.DatabricksConfig; +import com.databricks.sdk.core.DatabricksException; import com.databricks.sdk.core.oauth.DataPlaneTokenSource; import com.databricks.sdk.mixin.ClustersExt; import com.databricks.sdk.mixin.DbfsExt; @@ -206,6 +207,7 @@ import com.databricks.sdk.service.workspace.WorkspaceAPI; import com.databricks.sdk.service.workspace.WorkspaceService; import com.databricks.sdk.support.Generated; +import java.io.IOException; /** Entry point for accessing Databricks workspace-level APIs */ @Generated @@ -404,9 +406,7 @@ public WorkspaceClient(DatabricksConfig config) { servingEndpointsAPI = new ServingEndpointsAPI(apiClient); servingEndpointsDataPlaneAPI = new ServingEndpointsDataPlaneAPI( - apiClient, - servingEndpointsAPI, - new DataPlaneTokenSource(apiClient.getHttpClient(), config.getTokenSource())); + apiClient, servingEndpointsAPI, createDataPlaneTokenSource(apiClient, config)); settingsAPI = new SettingsAPI(apiClient); sharesAPI = new SharesAPI(apiClient); statementExecutionAPI = new StatementExecutionAPI(apiClient); @@ -485,7 +485,7 @@ public AlertsV2API alertsV2() { } /** - * Apps run directly on a customer’s Databricks instance, integrate with their data, use and + * Apps run directly on a customer's Databricks instance, integrate with their data, use and * extend Databricks services, and enable users to interact through single sign-on. */ public AppsAPI apps() { @@ -501,7 +501,7 @@ public ArtifactAllowlistsAPI artifactAllowlists() { } /** - * A catalog is the first layer of Unity Catalog’s three-level namespace. 
It’s used to organize + * A catalog is the first layer of Unity Catalog's three-level namespace. It's used to organize * your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG * data permission. * @@ -529,7 +529,7 @@ public CleanRoomTaskRunsAPI cleanRoomTaskRuns() { /** * A clean room uses Delta Sharing and serverless compute to provide a secure and * privacy-protecting environment where multiple parties can work together on sensitive enterprise - * data without direct access to each other’s data. + * data without direct access to each other's data. */ public CleanRoomsAPI cleanRooms() { return cleanRoomsAPI; @@ -757,7 +757,7 @@ public ExperimentsAPI experiments() { * that authorizes access to the cloud storage path. Each external location is subject to Unity * Catalog access-control policies that control which users and groups can access the credential. * If a user does not have access to an external location in Unity Catalog, the request fails and - * Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. + * Unity Catalog does not attempt to authenticate to your cloud tenant on the user's behalf. * *

Databricks recommends using external locations rather than using storage credentials * directly. @@ -874,10 +874,10 @@ public GroupsAPI groups() { * *

Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, * ready-to-use instances. When a cluster is attached to a pool, cluster nodes are created using - * the pool’s idle instances. If the pool has no idle instances, the pool expands by allocating a - * new instance from the instance provider in order to accommodate the cluster’s request. When a + * the pool's idle instances. If the pool has no idle instances, the pool expands by allocating a + * new instance from the instance provider in order to accommodate the cluster's request. When a * cluster releases an instance, it returns to the pool and is free for another cluster to use. - * Only clusters attached to a pool can use that pool’s idle instances. + * Only clusters attached to a pool can use that pool's idle instances. * *

You can specify a different pool for the driver node and worker nodes, or use the same pool * for both. @@ -1344,7 +1344,7 @@ public RedashConfigAPI redashConfig() { * Catalog provide centralized access control, auditing, lineage, and discovery of ML models * across Databricks workspaces. * - *

An MLflow registered model resides in the third layer of Unity Catalog’s three-level + *

An MLflow registered model resides in the third layer of Unity Catalog's three-level * namespace. Registered models contain model versions, which correspond to actual ML models * (MLflow models). Creating new model versions currently requires use of the MLflow Python * client. Once model versions are created, you can load them for batch inference using MLflow @@ -1400,7 +1400,7 @@ public ResourceQuotasAPI resourceQuotas() { } /** - * A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. + * A schema (also called a database) is the second layer of Unity Catalog's three-level namespace. * A schema organizes tables, views and functions. To access (or list) a table or view in a * schema, users must have the USE_SCHEMA data permission on the schema and its parent catalog, * and they must have the SELECT permission on the table or view. @@ -1575,7 +1575,7 @@ public StatementExecutionAPI statementExecution() { * data stored on your cloud tenant. Each storage credential is subject to Unity Catalog * access-control policies that control which users and groups can access the credential. If a * user does not have access to a storage credential in Unity Catalog, the request fails and Unity - * Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. + * Catalog does not attempt to authenticate to your cloud tenant on the user's behalf. * *

Databricks recommends using external locations rather than using storage credentials * directly. @@ -1614,7 +1614,7 @@ public TableConstraintsAPI tableConstraints() { } /** - * A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows + * A table resides in the third layer of Unity Catalog's three-level namespace. It contains rows * of data. To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the * schema, and they must have the USE_CATALOG permission on its parent catalog. To query a table, * users must have the SELECT permission on the table, and they must have the USE_CATALOG @@ -1670,7 +1670,7 @@ public TokensAPI tokens() { * or team by using your identity provider to create users and groups in Databricks workspace and * give them the proper level of access. When a user leaves your organization or no longer needs * access to Databricks workspace, admins can terminate the user in your identity provider and - * that user’s account will also be removed from Databricks workspace. This ensures a consistent + * that user's account will also be removed from Databricks workspace. This ensures a consistent * offboarding process and prevents unauthorized users from accessing sensitive data. 
*/ public UsersAPI users() { @@ -2898,4 +2898,14 @@ public ApiClient apiClient() { public DatabricksConfig config() { return config; } + + private DataPlaneTokenSource createDataPlaneTokenSource( + ApiClient apiClient, DatabricksConfig config) { + try { + return new DataPlaneTokenSource( + apiClient.getHttpClient(), config.getTokenSource(), config.getOidcEndpoints()); + } catch (IOException e) { + throw new DatabricksException("Failed to create DataPlaneTokenSource", e); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index 802785d6c..138ce58ae 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -207,12 +207,19 @@ private Response executeInner(Request in, String path) { IOException err = null; Response out = null; - // Authenticate the request. Failures should not be retried. - in.withHeaders(authenticateFunc.apply(null)); + // Only apply authentication headers if Authorization header is not already set + if (!in.getHeaders().containsKey("Authorization")) { + in.withHeaders(authenticateFunc.apply(null)); + } - // Prepend host to URL only after config.authenticate(). - // This call may configure the host (e.g. in case of notebook native auth). - in.withUrl(getHostFunc.apply(null) + path); + // Only prepend host if the path is not an absolute URL + if (!path.startsWith("http://") && !path.startsWith("https://")) { + // Prepend host to URL only after config.authenticate(). + // This call may configure the host (e.g. in case of notebook native auth). 
+ in.withUrl(getHostFunc.apply(null) + path); + } else { + in.withUrl(path); + } // Set User-Agent with auth type info, which is available only // after the first invocation to config.authenticate() diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java index d106019ce..8ed96338d 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java @@ -16,7 +16,7 @@ public class DataPlaneTokenSource { private final HttpClient httpClient; private final TokenSource cpTokenSource; private final ConcurrentHashMap sourcesCache; - + private final OpenIDConnectEndpoints endpoints; /** * Caching key for {@link EndpointTokenSource}, based on endpoint and authorization details. This * is a value object that uniquely identifies a token source configuration. @@ -65,11 +65,13 @@ public int hashCode() { * @param cpTokenSource The {@link TokenSource} for control plane tokens. 
* @throws NullPointerException if either parameter is null */ - public DataPlaneTokenSource(HttpClient httpClient, TokenSource cpTokenSource) { + public DataPlaneTokenSource( + HttpClient httpClient, TokenSource cpTokenSource, OpenIDConnectEndpoints endpoints) { this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); this.cpTokenSource = Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); this.sourcesCache = new ConcurrentHashMap<>(); + this.endpoints = Objects.requireNonNull(endpoints, "OpenID Connect endpoints cannot be null"); } /** @@ -95,7 +97,8 @@ public Token getToken(String endpoint, String authDetails) { EndpointTokenSource specificSource = sourcesCache.computeIfAbsent( - key, k -> new EndpointTokenSource(this.cpTokenSource, k.authDetails, this.httpClient)); + key, + k -> new EndpointTokenSource(this.cpTokenSource, k.authDetails, this.httpClient, this.endpoints)); return specificSource.getToken(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java index 05f3c7af1..abb2aae22 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java @@ -24,7 +24,7 @@ public class EndpointTokenSource extends RefreshableTokenSource { private final TokenSource cpTokenSource; private final String authDetails; private final HttpClient httpClient; - + private final OpenIDConnectEndpoints endpoints; /** * Constructs a new EndpointTokenSource. * @@ -34,7 +34,11 @@ public class EndpointTokenSource extends RefreshableTokenSource { * @throws IllegalArgumentException if authDetails is empty. * @throws NullPointerException if any of the parameters are null. 
*/ - public EndpointTokenSource(TokenSource cpTokenSource, String authDetails, HttpClient httpClient) { + public EndpointTokenSource( + TokenSource cpTokenSource, + String authDetails, + HttpClient httpClient, + OpenIDConnectEndpoints endpoints) { this.cpTokenSource = Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); this.authDetails = Objects.requireNonNull(authDetails, "Authorization details cannot be null"); @@ -42,6 +46,7 @@ public EndpointTokenSource(TokenSource cpTokenSource, String authDetails, HttpCl throw new IllegalArgumentException("Authorization details cannot be empty"); } this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); + this.endpoints = Objects.requireNonNull(endpoints, "OpenID Connect endpoints cannot be null"); } /** @@ -61,15 +66,17 @@ public EndpointTokenSource(TokenSource cpTokenSource, String authDetails, HttpCl @Override protected Token refresh() { Token cpToken = cpTokenSource.getToken(); - + System.out.println("Fetched CP Token: " + cpToken.getAccessToken()); Map params = new HashMap<>(); params.put(GRANT_TYPE_PARAM, JWT_GRANT_TYPE); params.put(AUTHORIZATION_DETAILS_PARAM, authDetails); params.put(ASSERTION_PARAM, cpToken.getAccessToken()); + System.out.println("Params: " + params); + OAuthResponse oauthResponse; try { - oauthResponse = TokenEndpointClient.requestToken(this.httpClient, TOKEN_ENDPOINT, params); + oauthResponse = TokenEndpointClient.requestToken(this.httpClient, endpoints.getTokenEndpoint(), params); } catch (DatabricksException | IllegalArgumentException | NullPointerException e) { LOG.error( "Failed to exchange control plane token for dataplane token at endpoint {}: {}", @@ -79,6 +86,8 @@ protected Token refresh() { throw e; } + System.out.println("Successfully fetched Dataplane Token: " + oauthResponse.getAccessToken()); + LocalDateTime expiry = LocalDateTime.now().plusSeconds(oauthResponse.getExpiresIn()); return new Token( 
oauthResponse.getAccessToken(), diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java index 69883dd24..df5913fb9 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java @@ -6,6 +6,7 @@ import com.databricks.sdk.core.http.Response; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import org.slf4j.Logger; @@ -46,10 +47,26 @@ public static OAuthResponse requestToken( throw new IllegalArgumentException("Token endpoint URL cannot be empty"); } + // Create a new map with properly encoded JSON values + Map encodedParams = new HashMap<>(params); + String authDetails = params.get("authorization_details"); + if (authDetails != null) { + try { + // Parse and re-serialize the authorization details to ensure it's valid JSON + Object authDetailsJson = OBJECT_MAPPER.readValue(authDetails, Object.class); + String encodedAuthDetails = OBJECT_MAPPER.writeValueAsString(authDetailsJson); + encodedParams.put("authorization_details", encodedAuthDetails); + } catch (IOException e) { + LOG.error("Failed to encode authorization details", e); + throw new DatabricksException("Failed to encode authorization details: " + e.getMessage(), e); + } + } + Response rawResponse; try { LOG.debug("Requesting token from endpoint: {}", tokenEndpointUrl); - rawResponse = httpClient.execute(new FormRequest(tokenEndpointUrl, params)); + LOG.debug("Token request parameters: {}", encodedParams); + rawResponse = httpClient.execute(new FormRequest(tokenEndpointUrl, encodedParams)); } catch (IOException e) { LOG.error("Failed to request token from {}: {}", tokenEndpointUrl, e.getMessage(), e); throw new DatabricksException( diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index 357b2d506..1efe7f0f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -48,6 +48,8 @@ public QueryEndpointResponse query(QueryEndpointInput request) { dataPlaneTokenSource.getToken( dataPlaneInfo.getEndpointUrl(), dataPlaneInfo.getAuthorizationDetails()); + System.out.println("DP Token: " + token.getAccessToken()); + try { Request req = new Request("POST", dataPlaneInfo.getEndpointUrl(), apiClient.serialize(request)); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java index 91418798e..0d8e830ef 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java @@ -73,6 +73,10 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti // For null httpClient HttpClient nullHttpClient = null; + // Mock OpenIDConnectEndpoints + OpenIDConnectEndpoints mockEndpoints = mock(OpenIDConnectEndpoints.class); + when(mockEndpoints.getTokenEndpoint()).thenReturn("https://test.databricks.com/oidc/v1/token"); + // For null/empty endpoint or authDetails return Stream.of( Arguments.of( @@ -81,6 +85,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockSuccessClient1, mockCpTokenSource, + mockEndpoints, new Token( "dp-access-token1", TEST_TOKEN_TYPE, @@ -94,6 +99,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti 
TEST_AUTH_DETAILS_2, mockSuccessClient2, mockCpTokenSource, + mockEndpoints, new Token( "dp-access-token2", TEST_TOKEN_TYPE, @@ -106,6 +112,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockErrorClient, mockCpTokenSource, + mockEndpoints, null, com.databricks.sdk.core.DatabricksException.class), Arguments.of( @@ -114,6 +121,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockIOExceptionClient, mockCpTokenSource, + mockEndpoints, null, com.databricks.sdk.core.DatabricksException.class), Arguments.of( @@ -122,6 +130,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockSuccessClient1, nullCpTokenSource, + mockEndpoints, null, NullPointerException.class), Arguments.of( @@ -130,6 +139,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, nullHttpClient, mockCpTokenSource, + mockEndpoints, null, NullPointerException.class), Arguments.of( @@ -138,6 +148,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockSuccessClient1, mockCpTokenSource, + mockEndpoints, null, NullPointerException.class), Arguments.of( @@ -146,6 +157,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti null, mockSuccessClient1, mockCpTokenSource, + mockEndpoints, null, NullPointerException.class)); } @@ -158,17 +170,19 @@ void testDataPlaneTokenSource( String authDetails, HttpClient httpClient, DatabricksOAuthTokenSource cpTokenSource, + OpenIDConnectEndpoints endpoints, Token expectedToken, Class expectedException) { if (expectedException != null) { assertThrows( expectedException, () -> { - DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource); + DataPlaneTokenSource source = + new DataPlaneTokenSource(httpClient, cpTokenSource, endpoints); source.getToken(endpoint, authDetails); }); } else { - DataPlaneTokenSource source 
= new DataPlaneTokenSource(httpClient, cpTokenSource); + DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource, endpoints); Token token = source.getToken(endpoint, authDetails); assertNotNull(token); assertEquals(expectedToken.getAccessToken(), token.getAccessToken()); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java index 549077690..9114b898b 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java @@ -73,12 +73,17 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio HttpClient mockIOExceptionClient = mock(HttpClient.class); when(mockIOExceptionClient.execute(any())).thenThrow(new IOException("Network error")); + // Mock OpenIDConnectEndpoints + OpenIDConnectEndpoints mockEndpoints = mock(OpenIDConnectEndpoints.class); + when(mockEndpoints.getTokenEndpoint()).thenReturn("https://test.databricks.com/oidc/v1/token"); + return Stream.of( Arguments.of( "Success response", mockCpTokenSource, TEST_AUTH_DETAILS, mockSuccessClient, + mockEndpoints, null, // No exception expected TEST_DP_TOKEN, TEST_TOKEN_TYPE, @@ -89,6 +94,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, mockErrorClient, + mockEndpoints, DatabricksException.class, null, null, @@ -99,6 +105,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, mockMalformedClient, + mockEndpoints, DatabricksException.class, null, null, @@ -109,6 +116,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, mockIOExceptionClient, + mockEndpoints, DatabricksException.class, null, null, @@ -119,6 +127,7 @@ 
private static Stream provideEndpointTokenScenarios() throws Exceptio null, TEST_AUTH_DETAILS, mockSuccessClient, + mockEndpoints, NullPointerException.class, null, null, @@ -129,6 +138,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, null, mockSuccessClient, + mockEndpoints, NullPointerException.class, null, null, @@ -139,6 +149,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, "", mockSuccessClient, + mockEndpoints, IllegalArgumentException.class, null, null, @@ -149,6 +160,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, null, + mockEndpoints, NullPointerException.class, null, null, @@ -163,6 +175,7 @@ void testEndpointTokenSource( DatabricksOAuthTokenSource cpTokenSource, String authDetails, HttpClient httpClient, + OpenIDConnectEndpoints endpoints, Class expectedException, String expectedAccessToken, String expectedTokenType, @@ -173,11 +186,12 @@ void testEndpointTokenSource( expectedException, () -> { EndpointTokenSource source = - new EndpointTokenSource(cpTokenSource, authDetails, httpClient); + new EndpointTokenSource(cpTokenSource, authDetails, httpClient, endpoints); source.getToken(); }); } else { - EndpointTokenSource source = new EndpointTokenSource(cpTokenSource, authDetails, httpClient); + EndpointTokenSource source = + new EndpointTokenSource(cpTokenSource, authDetails, httpClient, endpoints); Token token = source.getToken(); assertNotNull(token); assertEquals(expectedAccessToken, token.getAccessToken()); From 594f17eeee87a962a8511b84813cc9023f34ed50 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Fri, 23 May 2025 14:38:27 +0000 Subject: [PATCH 06/31] Update ApiClient --- .../com/databricks/sdk/WorkspaceClient.java | 15 +--- .../com/databricks/sdk/core/ApiClient.java | 77 +++++++++++-------- .../sdk/core/http/RequestModifier.java | 13 ++++ .../sdk/core/oauth/DataPlaneTokenSource.java | 8 +- 
.../sdk/core/oauth/EndpointTokenSource.java | 8 +- .../serving/ServingEndpointsDataPlaneAPI.java | 7 +- .../ServingEndpointsDataPlaneImpl.java | 14 ++-- .../core/oauth/DataPlaneTokenSourceTest.java | 5 +- .../core/oauth/EndpointTokenSourceTest.java | 5 +- 9 files changed, 86 insertions(+), 66 deletions(-) create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index d997b344d..6f4c5cd60 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -5,8 +5,6 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.core.ConfigLoader; import com.databricks.sdk.core.DatabricksConfig; -import com.databricks.sdk.core.DatabricksException; -import com.databricks.sdk.core.oauth.DataPlaneTokenSource; import com.databricks.sdk.mixin.ClustersExt; import com.databricks.sdk.mixin.DbfsExt; import com.databricks.sdk.mixin.SecretsExt; @@ -207,7 +205,6 @@ import com.databricks.sdk.service.workspace.WorkspaceAPI; import com.databricks.sdk.service.workspace.WorkspaceService; import com.databricks.sdk.support.Generated; -import java.io.IOException; /** Entry point for accessing Databricks workspace-level APIs */ @Generated @@ -406,7 +403,7 @@ public WorkspaceClient(DatabricksConfig config) { servingEndpointsAPI = new ServingEndpointsAPI(apiClient); servingEndpointsDataPlaneAPI = new ServingEndpointsDataPlaneAPI( - apiClient, servingEndpointsAPI, createDataPlaneTokenSource(apiClient, config)); + apiClient, config, servingEndpointsAPI); settingsAPI = new SettingsAPI(apiClient); sharesAPI = new SharesAPI(apiClient); statementExecutionAPI = new StatementExecutionAPI(apiClient); @@ -2898,14 +2895,4 @@ public ApiClient apiClient() { public DatabricksConfig config() 
{ return config; } - - private DataPlaneTokenSource createDataPlaneTokenSource( - ApiClient apiClient, DatabricksConfig config) { - try { - return new DataPlaneTokenSource( - apiClient.getHttpClient(), config.getTokenSource(), config.getOidcEndpoints()); - } catch (IOException e) { - throw new DatabricksException("Failed to create DataPlaneTokenSource", e); - } - } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index 138ce58ae..bc99baddc 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -4,6 +4,7 @@ import com.databricks.sdk.core.error.PrivateLinkInfo; import com.databricks.sdk.core.http.HttpClient; import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.core.http.RequestModifier; import com.databricks.sdk.core.http.Response; import com.databricks.sdk.core.retry.RequestBasedRetryStrategyPicker; import com.databricks.sdk.core.retry.RetryStrategy; @@ -179,26 +180,7 @@ protected O withJavaType(Request request, JavaType javaType) { } } - /** - * Executes HTTP request with retries and converts it to proper POJO - * - * @param in Commons HTTP request - * @param target Expected pojo type - * @return POJO of requested type - */ - public T execute(Request in, Class target) throws IOException { - Response out = getResponse(in); - if (target == Void.class) { - return null; - } - return deserialize(out, target); - } - - private Response getResponse(Request in) { - return executeInner(in, in.getUrl()); - } - - private Response executeInner(Request in, String path) { + private Response executeInner(Request in, String path, Optional modifier) { RetryStrategy retryStrategy = retryStrategyPicker.getRetryStrategy(in); int attemptNumber = 0; while (true) { @@ -207,19 +189,12 @@ private Response executeInner(Request in, String path) { 
IOException err = null; Response out = null; - // Only apply authentication headers if Authorization header is not already set - if (!in.getHeaders().containsKey("Authorization")) { - in.withHeaders(authenticateFunc.apply(null)); - } + // Authenticate the request. Failures should not be retried. + in.withHeaders(authenticateFunc.apply(null)); - // Only prepend host if the path is not an absolute URL - if (!path.startsWith("http://") && !path.startsWith("https://")) { - // Prepend host to URL only after config.authenticate(). - // This call may configure the host (e.g. in case of notebook native auth). - in.withUrl(getHostFunc.apply(null) + path); - } else { - in.withUrl(path); - } + // Prepend host to URL only after config.authenticate(). + // This call may configure the host (e.g. in case of notebook native auth). + in.withUrl(getHostFunc.apply(null) + path); // Set User-Agent with auth type info, which is available only // after the first invocation to config.authenticate() @@ -230,6 +205,11 @@ private Response executeInner(Request in, String path) { } in.withHeader("User-Agent", userAgent); + if (modifier.isPresent()) { + System.out.println("Modifier is present"); + in = modifier.get().modify(in); + } + // Make the request, catching any exceptions, as we may want to retry. 
try { out = httpClient.execute(in); @@ -269,6 +249,39 @@ private Response executeInner(Request in, String path) { } } + /** + * Executes HTTP request with retries and converts it to proper POJO, using custom request modifier + * + * @param in Commons HTTP request + * @param target Expected pojo type + * @param modifier Optional request modifier to customize request behavior + * @return POJO of requested type + */ + public T execute(Request in, Class target) throws IOException { + Response out = getResponse(in); + if (target == Void.class) { + return null; + } + return deserialize(out, target); + } + + + public T execute(Request in, Class target, RequestModifier modifier) throws IOException { + Response out = getResponse(in, modifier); + if (target == Void.class) { + return null; + } + return deserialize(out, target); + } + + private Response getResponse(Request in) { + return executeInner(in, in.getUrl(), Optional.empty()); + } + + private Response getResponse(Request in, RequestModifier modifier) { + return executeInner(in, in.getUrl(), Optional.of(modifier)); + } + private boolean isRequestSuccessful(Response response, Exception e) { return e == null && response.getStatusCode() >= 200 diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java new file mode 100644 index 000000000..20550a68a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java @@ -0,0 +1,13 @@ +package com.databricks.sdk.core.http; + + /** + * Interface for modifying a request + */ + public interface RequestModifier { + /** + * Apply modifications to the request + * @param request The request to modify + * @return The modified request + */ + Request modify(Request request); + } \ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java index 8ed96338d..47c6cd6c7 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java @@ -15,8 +15,8 @@ public class DataPlaneTokenSource { private final HttpClient httpClient; private final TokenSource cpTokenSource; + private final String host; private final ConcurrentHashMap sourcesCache; - private final OpenIDConnectEndpoints endpoints; /** * Caching key for {@link EndpointTokenSource}, based on endpoint and authorization details. This * is a value object that uniquely identifies a token source configuration. @@ -66,12 +66,12 @@ public int hashCode() { * @throws NullPointerException if either parameter is null */ public DataPlaneTokenSource( - HttpClient httpClient, TokenSource cpTokenSource, OpenIDConnectEndpoints endpoints) { + HttpClient httpClient, TokenSource cpTokenSource, String host) { this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); this.cpTokenSource = Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); + this.host = Objects.requireNonNull(host, "Host cannot be null"); this.sourcesCache = new ConcurrentHashMap<>(); - this.endpoints = Objects.requireNonNull(endpoints, "OpenID Connect endpoints cannot be null"); } /** @@ -98,7 +98,7 @@ public Token getToken(String endpoint, String authDetails) { EndpointTokenSource specificSource = sourcesCache.computeIfAbsent( key, - k -> new EndpointTokenSource(this.cpTokenSource, k.authDetails, this.httpClient, this.endpoints)); + k -> new EndpointTokenSource(this.cpTokenSource, k.authDetails, this.httpClient, this.host)); return specificSource.getToken(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java index abb2aae22..0f64af8ad 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java @@ -24,7 +24,7 @@ public class EndpointTokenSource extends RefreshableTokenSource { private final TokenSource cpTokenSource; private final String authDetails; private final HttpClient httpClient; - private final OpenIDConnectEndpoints endpoints; + private final String host; /** * Constructs a new EndpointTokenSource. * @@ -38,7 +38,7 @@ public EndpointTokenSource( TokenSource cpTokenSource, String authDetails, HttpClient httpClient, - OpenIDConnectEndpoints endpoints) { + String host) { this.cpTokenSource = Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); this.authDetails = Objects.requireNonNull(authDetails, "Authorization details cannot be null"); @@ -46,7 +46,7 @@ public EndpointTokenSource( throw new IllegalArgumentException("Authorization details cannot be empty"); } this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); - this.endpoints = Objects.requireNonNull(endpoints, "OpenID Connect endpoints cannot be null"); + this.host = Objects.requireNonNull(host, "Host cannot be null"); } /** @@ -76,7 +76,7 @@ protected Token refresh() { OAuthResponse oauthResponse; try { - oauthResponse = TokenEndpointClient.requestToken(this.httpClient, endpoints.getTokenEndpoint(), params); + oauthResponse = TokenEndpointClient.requestToken(this.httpClient, this.host + TOKEN_ENDPOINT, params); } catch (DatabricksException | IllegalArgumentException | NullPointerException e) { LOG.error( "Failed to exchange control plane token for dataplane token at endpoint {}: {}", diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java index a1e7f1214..b2897150c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java @@ -2,6 +2,7 @@ package com.databricks.sdk.service.serving; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksConfig; import com.databricks.sdk.core.oauth.DataPlaneTokenSource; import com.databricks.sdk.support.Generated; import org.slf4j.Logger; @@ -20,10 +21,10 @@ public class ServingEndpointsDataPlaneAPI { /** Regular-use constructor */ public ServingEndpointsDataPlaneAPI( ApiClient apiClient, - ServingEndpointsAPI servingEndpointsAPI, - DataPlaneTokenSource dataPlaneTokenSource) { + DatabricksConfig config, + ServingEndpointsAPI servingEndpointsAPI) { - impl = new ServingEndpointsDataPlaneImpl(apiClient, servingEndpointsAPI, dataPlaneTokenSource); + impl = new ServingEndpointsDataPlaneImpl(apiClient, config, servingEndpointsAPI); } /** Constructor for mocks */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index 1efe7f0f7..61287528e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -2,6 +2,7 @@ package com.databricks.sdk.service.serving; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksConfig; import com.databricks.sdk.core.DatabricksException; import com.databricks.sdk.core.http.Request; import com.databricks.sdk.core.oauth.DataPlaneTokenSource; @@ -20,11 +21,11 @@ class 
ServingEndpointsDataPlaneImpl implements ServingEndpointsDataPlaneService public ServingEndpointsDataPlaneImpl( ApiClient apiClient, - ServingEndpointsAPI controlPlane, - DataPlaneTokenSource dataPlaneTokenSource) { + DatabricksConfig config, + ServingEndpointsAPI controlPlane) { this.apiClient = apiClient; this.controlPlane = controlPlane; - this.dataPlaneTokenSource = dataPlaneTokenSource; + this.dataPlaneTokenSource = new DataPlaneTokenSource(apiClient.getHttpClient(), config.getTokenSource(), config.getHost()); this.infos = new ConcurrentHashMap<>(); } @@ -55,9 +56,12 @@ public QueryEndpointResponse query(QueryEndpointInput request) { new Request("POST", dataPlaneInfo.getEndpointUrl(), apiClient.serialize(request)); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - req.withHeader("Authorization", "Bearer " + token.getAccessToken()); - return apiClient.execute(req, QueryEndpointResponse.class); + return apiClient.execute(req, QueryEndpointResponse.class, r -> { + r.withHeader("Authorization", "Bearer " + token.getAccessToken()); + r.withUrl(dataPlaneInfo.getEndpointUrl()); + return r; + }); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java index 0d8e830ef..e9a290a58 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java @@ -23,6 +23,7 @@ public class DataPlaneTokenSourceTest { private static final String TEST_TOKEN_TYPE = "Bearer"; private static final String TEST_REFRESH_TOKEN = "refresh-token"; private static final int TEST_EXPIRES_IN = 3600; + private static final String TEST_HOST = "https://test.databricks.com"; 
private static Stream provideDataPlaneTokenScenarios() throws Exception { // Mock DatabricksOAuthTokenSource for control plane token @@ -178,11 +179,11 @@ void testDataPlaneTokenSource( expectedException, () -> { DataPlaneTokenSource source = - new DataPlaneTokenSource(httpClient, cpTokenSource, endpoints); + new DataPlaneTokenSource(httpClient, cpTokenSource, TEST_HOST); source.getToken(endpoint, authDetails); }); } else { - DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource, endpoints); + DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource, TEST_HOST); Token token = source.getToken(endpoint, authDetails); assertNotNull(token); assertEquals(expectedToken.getAccessToken(), token.getAccessToken()); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java index 9114b898b..b8baa7e19 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java @@ -22,6 +22,7 @@ class EndpointTokenSourceTest { private static final String TEST_TOKEN_TYPE = "Bearer"; private static final String TEST_REFRESH_TOKEN = "refresh-token"; private static final int TEST_EXPIRES_IN = 3600; + private static final String TEST_HOST = "https://test.databricks.com"; private static Stream provideEndpointTokenScenarios() throws Exception { // Success response JSON @@ -186,12 +187,12 @@ void testEndpointTokenSource( expectedException, () -> { EndpointTokenSource source = - new EndpointTokenSource(cpTokenSource, authDetails, httpClient, endpoints); + new EndpointTokenSource(cpTokenSource, authDetails, httpClient, TEST_HOST); source.getToken(); }); } else { EndpointTokenSource source = - new EndpointTokenSource(cpTokenSource, authDetails, httpClient, endpoints); + new 
EndpointTokenSource(cpTokenSource, authDetails, httpClient, TEST_HOST); Token token = source.getToken(); assertNotNull(token); assertEquals(expectedAccessToken, token.getAccessToken()); From ba1b401d9fc3ba43c719a2631a3c65b991a1bb3a Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Fri, 23 May 2025 14:51:11 +0000 Subject: [PATCH 07/31] add temp test --- .../serving/ServingDataplaneExample.java | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java new file mode 100644 index 000000000..3ca872229 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java @@ -0,0 +1,67 @@ +package com.databricks.sdk.service.serving; + +import static org.junit.jupiter.api.Assertions.*; + +import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.core.DatabricksConfig; +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ServingDataplaneExample { + private static final Logger LOG = LoggerFactory.getLogger(ServingDataplaneExample.class); + + @Test + void testQueryModelEndpoint() { + // Use Databricks CLI authentication with required scopes + DatabricksConfig config = new DatabricksConfig() + .setAuthType("databricks-cli") + .setHost("https://e2-dogfood.staging.cloud.databricks.com") + .setDebugHeaders(true); + + LOG.info("Creating WorkspaceClient with config: {}", config); + WorkspaceClient client = new WorkspaceClient(config); + + // Initialize the ServingEndpointsDataPlaneAPI + ServingEndpointsDataPlaneAPI servingEndpointsDataPlaneAPI = client.servingEndpointsDataPlane(); + + // Example: 
Query a model endpoint + String endpointName = "TestDirectDataplaneEmmy"; // Replace with your endpoint name + + try { + // Create input data for trip prediction + Map inputData = new HashMap<>(); + inputData.put("trip_distance", 2.5); + inputData.put("pickup_zip", "10001"); + inputData.put("dropoff_zip", "10002"); + + LOG.info("Preparing to send request to endpoint: {}", endpointName); + LOG.info("Request payload: {}", inputData); + + // Create the query input object + QueryEndpointInput queryInput = + new QueryEndpointInput() + .setName(endpointName) + .setInputs(new Map[]{inputData}); + + LOG.info("Querying endpoint {} with input: {}", endpointName, queryInput); + + // Query the endpoint + QueryEndpointResponse response = servingEndpointsDataPlaneAPI.query(queryInput); + + // Add assertions to verify the response + assertNotNull(response, "Response should not be null"); + assertNotNull(response.getPredictions(), "Response predictions should not be null"); + assertFalse(response.getPredictions().isEmpty(), "Response predictions should not be empty"); + + // Print the response for debugging purposes + LOG.info("Model Response: {}", response.getPredictions()); + + } catch (Exception e) { + LOG.error("Test failed with exception", e); + fail("Test failed with exception: " + e.getMessage(), e); + } + } +} From 79e360da4a46ac89b40371185ee32b989d32030b Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Fri, 23 May 2025 16:00:54 +0000 Subject: [PATCH 08/31] Add host as a field to token sources --- .../sdk/core/oauth/DataPlaneTokenSource.java | 21 +++++++-- .../sdk/core/oauth/EndpointTokenSource.java | 25 ++++++---- .../core/oauth/DataPlaneTokenSourceTest.java | 46 +++++++++++++------ .../core/oauth/EndpointTokenSourceTest.java | 45 ++++++++++++++---- 4 files changed, 101 insertions(+), 36 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java index b12a92dd2..4504d9ca3 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/DataPlaneTokenSource.java @@ -14,9 +14,9 @@ */ public class DataPlaneTokenSource { private final HttpClient httpClient; - private final DatabricksOAuthTokenSource cpTokenSource; + private final TokenSource cpTokenSource; + private final String host; private final ConcurrentHashMap sourcesCache; - /** * Caching key for {@link EndpointTokenSource}, based on endpoint and authorization details. This * is a value object that uniquely identifies a token source configuration. @@ -62,13 +62,19 @@ public int hashCode() { * Constructs a DataPlaneTokenSource. * * @param httpClient The {@link HttpClient} for token requests. - * @param cpTokenSource The {@link DatabricksOAuthTokenSource} for control plane tokens. + * @param cpTokenSource The {@link TokenSource} for control plane tokens. + * @param host The host for the token exchange request. 
* @throws NullPointerException if either parameter is null */ - public DataPlaneTokenSource(HttpClient httpClient, DatabricksOAuthTokenSource cpTokenSource) { + public DataPlaneTokenSource(HttpClient httpClient, TokenSource cpTokenSource, String host) { this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); this.cpTokenSource = Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); + this.host = Objects.requireNonNull(host, "Host cannot be null"); + + if (host.isEmpty()) { + throw new IllegalArgumentException("Host cannot be empty"); + } this.sourcesCache = new ConcurrentHashMap<>(); } @@ -85,17 +91,22 @@ public DataPlaneTokenSource(HttpClient httpClient, DatabricksOAuthTokenSource cp public Token getToken(String endpoint, String authDetails) { Objects.requireNonNull(endpoint, "Data plane endpoint URL cannot be null"); Objects.requireNonNull(authDetails, "Authorization details cannot be null"); + if (endpoint.isEmpty()) { throw new IllegalArgumentException("Data plane endpoint URL cannot be empty"); } if (authDetails.isEmpty()) { throw new IllegalArgumentException("Authorization details cannot be empty"); } + TokenSourceKey key = new TokenSourceKey(endpoint, authDetails); EndpointTokenSource specificSource = sourcesCache.computeIfAbsent( - key, k -> new EndpointTokenSource(this.cpTokenSource, k.authDetails, this.httpClient)); + key, + k -> + new EndpointTokenSource( + this.cpTokenSource, k.authDetails, this.httpClient, this.host)); return specificSource.getToken(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java index c54e7f6c0..3ca75c441 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/EndpointTokenSource.java @@ -11,8 +11,7 @@ /** * 
Represents a token source that exchanges a control plane token for an endpoint-specific dataplane - * token. It utilizes an underlying {@link DatabricksOAuthTokenSource} to obtain the initial control - * plane token. + * token. It utilizes an underlying {@link TokenSource} to obtain the initial control plane token. */ public class EndpointTokenSource extends RefreshableTokenSource { private static final Logger LOG = LoggerFactory.getLogger(EndpointTokenSource.class); @@ -22,29 +21,35 @@ public class EndpointTokenSource extends RefreshableTokenSource { private static final String ASSERTION_PARAM = "assertion"; private static final String TOKEN_ENDPOINT = "/oidc/v1/token"; - private final DatabricksOAuthTokenSource cpTokenSource; + private final TokenSource cpTokenSource; private final String authDetails; private final HttpClient httpClient; + private final String host; /** * Constructs a new EndpointTokenSource. * - * @param cpTokenSource The {@link DatabricksOAuthTokenSource} used to obtain the control plane - * token. + * @param cpTokenSource The {@link TokenSource} used to obtain the control plane token. * @param authDetails The authorization details required for the token exchange. * @param httpClient The {@link HttpClient} used to make the token exchange request. - * @throws IllegalArgumentException if authDetails is empty. + * @param host The host for the token exchange request. + * @throws IllegalArgumentException if authDetails is empty or host is empty. * @throws NullPointerException if any of the parameters are null. 
*/ public EndpointTokenSource( - DatabricksOAuthTokenSource cpTokenSource, String authDetails, HttpClient httpClient) { + TokenSource cpTokenSource, String authDetails, HttpClient httpClient, String host) { this.cpTokenSource = Objects.requireNonNull(cpTokenSource, "Control plane token source cannot be null"); this.authDetails = Objects.requireNonNull(authDetails, "Authorization details cannot be null"); + this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); + this.host = Objects.requireNonNull(host, "Host cannot be null"); + if (authDetails.isEmpty()) { throw new IllegalArgumentException("Authorization details cannot be empty"); } - this.httpClient = Objects.requireNonNull(httpClient, "HTTP client cannot be null"); + if (host.isEmpty()) { + throw new IllegalArgumentException("Host cannot be empty"); + } } /** @@ -64,7 +69,6 @@ public EndpointTokenSource( @Override protected Token refresh() { Token cpToken = cpTokenSource.getToken(); - Map params = new HashMap<>(); params.put(GRANT_TYPE_PARAM, JWT_GRANT_TYPE); params.put(AUTHORIZATION_DETAILS_PARAM, authDetails); @@ -72,7 +76,8 @@ protected Token refresh() { OAuthResponse oauthResponse; try { - oauthResponse = TokenEndpointClient.requestToken(this.httpClient, TOKEN_ENDPOINT, params); + oauthResponse = + TokenEndpointClient.requestToken(this.httpClient, this.host + TOKEN_ENDPOINT, params); } catch (DatabricksException | IllegalArgumentException | NullPointerException e) { LOG.error( "Failed to exchange control plane token for dataplane token at endpoint {}: {}", diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java index 91418798e..35b3586d5 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java +++ 
b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java @@ -23,6 +23,7 @@ public class DataPlaneTokenSourceTest { private static final String TEST_TOKEN_TYPE = "Bearer"; private static final String TEST_REFRESH_TOKEN = "refresh-token"; private static final int TEST_EXPIRES_IN = 3600; + private static final String TEST_HOST = "https://test.databricks.com"; private static Stream provideDataPlaneTokenScenarios() throws Exception { // Mock DatabricksOAuthTokenSource for control plane token @@ -31,7 +32,6 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti DatabricksOAuthTokenSource mockCpTokenSource = mock(DatabricksOAuthTokenSource.class); when(mockCpTokenSource.getToken()).thenReturn(cpToken); - // --- Mock HttpClient for different scenarios --- // Success JSON for endpoint1/auth1 String successJson1 = "{" @@ -56,7 +56,6 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti when(mockSuccessClient2.execute(any())) .thenReturn(new Response(successJson2, 200, "OK", new URL(TEST_ENDPOINT_2))); - // Error response JSON String errorJson = "{" + "\"error\":\"invalid_request\"," + "\"error_description\":\"Bad request\"" + "}"; HttpClient mockErrorClient = mock(HttpClient.class); @@ -67,12 +66,6 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti HttpClient mockIOExceptionClient = mock(HttpClient.class); when(mockIOExceptionClient.execute(any())).thenThrow(new IOException("Network error")); - // For null cpTokenSource - DatabricksOAuthTokenSource nullCpTokenSource = null; - - // For null httpClient - HttpClient nullHttpClient = null; - // For null/empty endpoint or authDetails return Stream.of( Arguments.of( @@ -81,6 +74,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockSuccessClient1, mockCpTokenSource, + TEST_HOST, new Token( "dp-access-token1", TEST_TOKEN_TYPE, @@ -94,6 +88,7 @@ private static Stream 
provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_2, mockSuccessClient2, mockCpTokenSource, + TEST_HOST, new Token( "dp-access-token2", TEST_TOKEN_TYPE, @@ -106,6 +101,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockErrorClient, mockCpTokenSource, + TEST_HOST, null, com.databricks.sdk.core.DatabricksException.class), Arguments.of( @@ -114,6 +110,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockIOExceptionClient, mockCpTokenSource, + TEST_HOST, null, com.databricks.sdk.core.DatabricksException.class), Arguments.of( @@ -121,15 +118,17 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_ENDPOINT_1, TEST_AUTH_DETAILS_1, mockSuccessClient1, - nullCpTokenSource, + null, + TEST_HOST, null, NullPointerException.class), Arguments.of( "Null httpClient", TEST_ENDPOINT_1, TEST_AUTH_DETAILS_1, - nullHttpClient, + null, mockCpTokenSource, + TEST_HOST, null, NullPointerException.class), Arguments.of( @@ -138,6 +137,7 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti TEST_AUTH_DETAILS_1, mockSuccessClient1, mockCpTokenSource, + TEST_HOST, null, NullPointerException.class), Arguments.of( @@ -146,8 +146,27 @@ private static Stream provideDataPlaneTokenScenarios() throws Excepti null, mockSuccessClient1, mockCpTokenSource, + TEST_HOST, + null, + NullPointerException.class), + Arguments.of( + "Null host", + TEST_ENDPOINT_1, + TEST_AUTH_DETAILS_1, + mockSuccessClient1, + mockCpTokenSource, + null, + null, + NullPointerException.class), + Arguments.of( + "Empty host", + TEST_ENDPOINT_1, + TEST_AUTH_DETAILS_1, + mockSuccessClient1, + mockCpTokenSource, + "", null, - NullPointerException.class)); + IllegalArgumentException.class)); } @ParameterizedTest(name = "{0}") @@ -158,17 +177,18 @@ void testDataPlaneTokenSource( String authDetails, HttpClient httpClient, DatabricksOAuthTokenSource cpTokenSource, + String host, Token 
expectedToken, Class expectedException) { if (expectedException != null) { assertThrows( expectedException, () -> { - DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource); + DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource, host); source.getToken(endpoint, authDetails); }); } else { - DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource); + DataPlaneTokenSource source = new DataPlaneTokenSource(httpClient, cpTokenSource, host); Token token = source.getToken(endpoint, authDetails); assertNotNull(token); assertEquals(expectedToken.getAccessToken(), token.getAccessToken()); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java index 549077690..a3af2254f 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java @@ -22,9 +22,9 @@ class EndpointTokenSourceTest { private static final String TEST_TOKEN_TYPE = "Bearer"; private static final String TEST_REFRESH_TOKEN = "refresh-token"; private static final int TEST_EXPIRES_IN = 3600; + private static final String TEST_HOST = "https://test.databricks.com"; private static Stream provideEndpointTokenScenarios() throws Exception { - // Success response JSON String successJson = "{" + "\"access_token\":\"" @@ -39,12 +39,12 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio + "\"refresh_token\":\"" + TEST_REFRESH_TOKEN + "\"}"; - // Error response JSON + String errorJson = "{" + "\"error\":\"invalid_client\"," + "\"error_description\":\"Client authentication failed\"}"; - // Malformed JSON + String malformedJson = "{not valid json}"; // Mock DatabricksOAuthTokenSource for control plane token @@ -79,6 +79,7 @@ private static Stream 
provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, mockSuccessClient, + TEST_HOST, null, // No exception expected TEST_DP_TOKEN, TEST_TOKEN_TYPE, @@ -89,6 +90,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, mockErrorClient, + TEST_HOST, DatabricksException.class, null, null, @@ -99,6 +101,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, mockMalformedClient, + TEST_HOST, DatabricksException.class, null, null, @@ -109,6 +112,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, mockIOExceptionClient, + TEST_HOST, DatabricksException.class, null, null, @@ -119,6 +123,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio null, TEST_AUTH_DETAILS, mockSuccessClient, + TEST_HOST, NullPointerException.class, null, null, @@ -129,6 +134,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, null, mockSuccessClient, + TEST_HOST, NullPointerException.class, null, null, @@ -139,6 +145,7 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, "", mockSuccessClient, + TEST_HOST, IllegalArgumentException.class, null, null, @@ -149,10 +156,33 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio mockCpTokenSource, TEST_AUTH_DETAILS, null, + TEST_HOST, + NullPointerException.class, + null, + null, + null, + 0), + Arguments.of( + "Null host", + mockCpTokenSource, + TEST_AUTH_DETAILS, + mockSuccessClient, + null, NullPointerException.class, null, null, null, + 0), + Arguments.of( + "Empty host", + mockCpTokenSource, + TEST_AUTH_DETAILS, + mockSuccessClient, + "", + IllegalArgumentException.class, + null, + null, + null, 0)); } @@ -163,6 +193,7 @@ void testEndpointTokenSource( DatabricksOAuthTokenSource cpTokenSource, String authDetails, HttpClient 
httpClient, + String host, Class expectedException, String expectedAccessToken, String expectedTokenType, @@ -173,19 +204,17 @@ void testEndpointTokenSource( expectedException, () -> { EndpointTokenSource source = - new EndpointTokenSource(cpTokenSource, authDetails, httpClient); + new EndpointTokenSource(cpTokenSource, authDetails, httpClient, host); source.getToken(); }); } else { - EndpointTokenSource source = new EndpointTokenSource(cpTokenSource, authDetails, httpClient); + EndpointTokenSource source = + new EndpointTokenSource(cpTokenSource, authDetails, httpClient, host); Token token = source.getToken(); assertNotNull(token); assertEquals(expectedAccessToken, token.getAccessToken()); assertEquals(expectedTokenType, token.getTokenType()); assertEquals(expectedRefreshToken, token.getRefreshToken()); - // Allow a few seconds of clock skew for expiry - assertTrue(token.isValid()); - assertTrue(token.getAccessToken().length() > 0); } } } From b60ed0e79a6023fc6fe52104ef11f1643fa66eba Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Fri, 23 May 2025 16:47:27 +0000 Subject: [PATCH 09/31] Updated tests --- .../core/oauth/DataPlaneTokenSourceTest.java | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java index 35b3586d5..5eb08ac93 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java @@ -10,9 +10,11 @@ import java.net.URL; import java.time.LocalDateTime; import java.util.stream.Stream; +import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.MockedConstruction; 
public class DataPlaneTokenSourceTest { private static final String TEST_ENDPOINT_1 = "https://endpoint1.databricks.com/"; @@ -197,4 +199,38 @@ void testDataPlaneTokenSource( assertTrue(token.isValid()); } } + + @Test + void testEndpointTokenSourceConstructionCount() throws Exception { + Token cpToken = new Token(TEST_CP_TOKEN, TEST_TOKEN_TYPE, null, LocalDateTime.now().plusSeconds(3600)); + DatabricksOAuthTokenSource mockCpTokenSource = mock(DatabricksOAuthTokenSource.class); + when(mockCpTokenSource.getToken()).thenReturn(cpToken); + + String successJson = "{\"access_token\":\"dp-access-token\",\"token_type\":\"Bearer\",\"refresh_token\":\"refresh-token\",\"expires_in\":3600}"; + HttpClient mockHttpClient = mock(HttpClient.class); + when(mockHttpClient.execute(any())).thenReturn(new Response(successJson, 200, "OK", new URL(TEST_ENDPOINT_1))); + + try (MockedConstruction mockedConstruction = mockConstruction(EndpointTokenSource.class)) { + DataPlaneTokenSource source = new DataPlaneTokenSource(mockHttpClient, mockCpTokenSource, TEST_HOST); + + // First call - should create new EndpointTokenSource + source.getToken(TEST_ENDPOINT_1, TEST_AUTH_DETAILS_1); + assertEquals(1, mockedConstruction.constructed().size(), "First call should create one EndpointTokenSource"); + + // Second call with same endpoint and auth details - should reuse existing EndpointTokenSource + source.getToken(TEST_ENDPOINT_1, TEST_AUTH_DETAILS_1); + assertEquals(1, mockedConstruction.constructed().size(), "This call should reuse the existing EndpointTokenSource"); + + // Call with different endpoint - should create new EndpointTokenSource + source.getToken(TEST_ENDPOINT_2, TEST_AUTH_DETAILS_2); + assertEquals(2, mockedConstruction.constructed().size(), "Different endpoint should create new EndpointTokenSource"); + + // Call with different auth details - should create new EndpointTokenSource + source.getToken(TEST_ENDPOINT_1, TEST_AUTH_DETAILS_2); + assertEquals(3, 
mockedConstruction.constructed().size(), "Different auth details should create new EndpointTokenSource"); + + source.getToken(TEST_ENDPOINT_2, TEST_AUTH_DETAILS_2); + assertEquals(3, mockedConstruction.constructed().size(), "This call should reuse the existing EndpointTokenSource"); + } + } } From b56c4d597e8dca41e1bdfbe6131ce33141add9d5 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Fri, 23 May 2025 16:48:47 +0000 Subject: [PATCH 10/31] Updated tests --- .../com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java index 5eb08ac93..a3724ff3d 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java @@ -201,7 +201,7 @@ void testDataPlaneTokenSource( } @Test - void testEndpointTokenSourceConstructionCount() throws Exception { + void testEndpointTokenSourceCaching() throws Exception { Token cpToken = new Token(TEST_CP_TOKEN, TEST_TOKEN_TYPE, null, LocalDateTime.now().plusSeconds(3600)); DatabricksOAuthTokenSource mockCpTokenSource = mock(DatabricksOAuthTokenSource.class); when(mockCpTokenSource.getToken()).thenReturn(cpToken); From 6213dd8ecd9757b8bc33c2a125b22e7bef490ca9 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Sat, 24 May 2025 18:20:02 +0000 Subject: [PATCH 11/31] Fix formatting --- .../com/databricks/sdk/WorkspaceClient.java | 3 +- .../com/databricks/sdk/core/ApiClient.java | 4 +- .../sdk/core/http/RequestModifier.java | 17 ++++---- .../sdk/core/oauth/TokenEndpointClient.java | 3 +- .../serving/ServingEndpointsDataPlaneAPI.java | 5 +-- .../ServingEndpointsDataPlaneImpl.java | 21 +++++----- .../core/oauth/DataPlaneTokenSourceTest.java | 40 
++++++++++++++----- .../core/oauth/EndpointTokenSourceTest.java | 4 -- .../serving/ServingDataplaneExample.java | 13 +++--- 9 files changed, 62 insertions(+), 48 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 6f4c5cd60..2db1a15e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -402,8 +402,7 @@ public WorkspaceClient(DatabricksConfig config) { servicePrincipalsAPI = new ServicePrincipalsAPI(apiClient); servingEndpointsAPI = new ServingEndpointsAPI(apiClient); servingEndpointsDataPlaneAPI = - new ServingEndpointsDataPlaneAPI( - apiClient, config, servingEndpointsAPI); + new ServingEndpointsDataPlaneAPI(apiClient, config, servingEndpointsAPI); settingsAPI = new SettingsAPI(apiClient); sharesAPI = new SharesAPI(apiClient); statementExecutionAPI = new StatementExecutionAPI(apiClient); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index bc99baddc..9a89ca974 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -250,7 +250,8 @@ private Response executeInner(Request in, String path, Optional } /** - * Executes HTTP request with retries and converts it to proper POJO, using custom request modifier + * Executes HTTP request with retries and converts it to proper POJO, using custom request + * modifier * * @param in Commons HTTP request * @param target Expected pojo type @@ -265,7 +266,6 @@ public T execute(Request in, Class target) throws IOException { return deserialize(out, target); } - public T execute(Request in, Class target, RequestModifier modifier) throws IOException { Response out = 
getResponse(in, modifier); if (target == Void.class) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java index 20550a68a..922155548 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java @@ -1,13 +1,12 @@ package com.databricks.sdk.core.http; +/** Interface for modifying a request */ +public interface RequestModifier { /** - * Interface for modifying a request + * Apply modifications to the request + * + * @param request The request to modify + * @return The modified request */ - public interface RequestModifier { - /** - * Apply modifications to the request - * @param request The request to modify - * @return The modified request - */ - Request modify(Request request); - } \ No newline at end of file + Request modify(Request request); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java index df5913fb9..3e0bc7c5c 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/TokenEndpointClient.java @@ -58,7 +58,8 @@ public static OAuthResponse requestToken( encodedParams.put("authorization_details", encodedAuthDetails); } catch (IOException e) { LOG.error("Failed to encode authorization details", e); - throw new DatabricksException("Failed to encode authorization details: " + e.getMessage(), e); + throw new DatabricksException( + "Failed to encode authorization details: " + e.getMessage(), e); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java index b2897150c..33f3f17b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java @@ -3,7 +3,6 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.core.DatabricksConfig; -import com.databricks.sdk.core.oauth.DataPlaneTokenSource; import com.databricks.sdk.support.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -20,9 +19,7 @@ public class ServingEndpointsDataPlaneAPI { /** Regular-use constructor */ public ServingEndpointsDataPlaneAPI( - ApiClient apiClient, - DatabricksConfig config, - ServingEndpointsAPI servingEndpointsAPI) { + ApiClient apiClient, DatabricksConfig config, ServingEndpointsAPI servingEndpointsAPI) { impl = new ServingEndpointsDataPlaneImpl(apiClient, config, servingEndpointsAPI); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index 61287528e..5aa529385 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -20,12 +20,12 @@ class ServingEndpointsDataPlaneImpl implements ServingEndpointsDataPlaneService private final ConcurrentHashMap infos; public ServingEndpointsDataPlaneImpl( - ApiClient apiClient, - DatabricksConfig config, - ServingEndpointsAPI controlPlane) { + ApiClient apiClient, DatabricksConfig config, ServingEndpointsAPI controlPlane) { this.apiClient = apiClient; this.controlPlane = controlPlane; - this.dataPlaneTokenSource = new DataPlaneTokenSource(apiClient.getHttpClient(), 
config.getTokenSource(), config.getHost()); + this.dataPlaneTokenSource = + new DataPlaneTokenSource( + apiClient.getHttpClient(), config.getTokenSource(), config.getHost()); this.infos = new ConcurrentHashMap<>(); } @@ -57,11 +57,14 @@ public QueryEndpointResponse query(QueryEndpointInput request) { req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, QueryEndpointResponse.class, r -> { - r.withHeader("Authorization", "Bearer " + token.getAccessToken()); - r.withUrl(dataPlaneInfo.getEndpointUrl()); - return r; - }); + return apiClient.execute( + req, + QueryEndpointResponse.class, + r -> { + r.withHeader("Authorization", "Bearer " + token.getAccessToken()); + r.withUrl(dataPlaneInfo.getEndpointUrl()); + return r; + }); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java index a3724ff3d..5887c4ee1 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/DataPlaneTokenSourceTest.java @@ -202,35 +202,55 @@ void testDataPlaneTokenSource( @Test void testEndpointTokenSourceCaching() throws Exception { - Token cpToken = new Token(TEST_CP_TOKEN, TEST_TOKEN_TYPE, null, LocalDateTime.now().plusSeconds(3600)); + Token cpToken = + new Token(TEST_CP_TOKEN, TEST_TOKEN_TYPE, null, LocalDateTime.now().plusSeconds(3600)); DatabricksOAuthTokenSource mockCpTokenSource = mock(DatabricksOAuthTokenSource.class); when(mockCpTokenSource.getToken()).thenReturn(cpToken); - String successJson = "{\"access_token\":\"dp-access-token\",\"token_type\":\"Bearer\",\"refresh_token\":\"refresh-token\",\"expires_in\":3600}"; + String successJson = + 
"{\"access_token\":\"dp-access-token\",\"token_type\":\"Bearer\",\"refresh_token\":\"refresh-token\",\"expires_in\":3600}"; HttpClient mockHttpClient = mock(HttpClient.class); - when(mockHttpClient.execute(any())).thenReturn(new Response(successJson, 200, "OK", new URL(TEST_ENDPOINT_1))); + when(mockHttpClient.execute(any())) + .thenReturn(new Response(successJson, 200, "OK", new URL(TEST_ENDPOINT_1))); - try (MockedConstruction mockedConstruction = mockConstruction(EndpointTokenSource.class)) { - DataPlaneTokenSource source = new DataPlaneTokenSource(mockHttpClient, mockCpTokenSource, TEST_HOST); + try (MockedConstruction mockedConstruction = + mockConstruction(EndpointTokenSource.class)) { + DataPlaneTokenSource source = + new DataPlaneTokenSource(mockHttpClient, mockCpTokenSource, TEST_HOST); // First call - should create new EndpointTokenSource source.getToken(TEST_ENDPOINT_1, TEST_AUTH_DETAILS_1); - assertEquals(1, mockedConstruction.constructed().size(), "First call should create one EndpointTokenSource"); + assertEquals( + 1, + mockedConstruction.constructed().size(), + "First call should create one EndpointTokenSource"); // Second call with same endpoint and auth details - should reuse existing EndpointTokenSource source.getToken(TEST_ENDPOINT_1, TEST_AUTH_DETAILS_1); - assertEquals(1, mockedConstruction.constructed().size(), "This call should reuse the existing EndpointTokenSource"); + assertEquals( + 1, + mockedConstruction.constructed().size(), + "This call should reuse the existing EndpointTokenSource"); // Call with different endpoint - should create new EndpointTokenSource source.getToken(TEST_ENDPOINT_2, TEST_AUTH_DETAILS_2); - assertEquals(2, mockedConstruction.constructed().size(), "Different endpoint should create new EndpointTokenSource"); + assertEquals( + 2, + mockedConstruction.constructed().size(), + "Different endpoint should create new EndpointTokenSource"); // Call with different auth details - should create new EndpointTokenSource 
source.getToken(TEST_ENDPOINT_1, TEST_AUTH_DETAILS_2); - assertEquals(3, mockedConstruction.constructed().size(), "Different auth details should create new EndpointTokenSource"); + assertEquals( + 3, + mockedConstruction.constructed().size(), + "Different auth details should create new EndpointTokenSource"); source.getToken(TEST_ENDPOINT_2, TEST_AUTH_DETAILS_2); - assertEquals(3, mockedConstruction.constructed().size(), "This call should reuse the existing EndpointTokenSource"); + assertEquals( + 3, + mockedConstruction.constructed().size(), + "This call should reuse the existing EndpointTokenSource"); } } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java index 319b90e71..a3af2254f 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/EndpointTokenSourceTest.java @@ -73,10 +73,6 @@ private static Stream provideEndpointTokenScenarios() throws Exceptio HttpClient mockIOExceptionClient = mock(HttpClient.class); when(mockIOExceptionClient.execute(any())).thenThrow(new IOException("Network error")); - // Mock OpenIDConnectEndpoints - OpenIDConnectEndpoints mockEndpoints = mock(OpenIDConnectEndpoints.class); - when(mockEndpoints.getTokenEndpoint()).thenReturn("https://test.databricks.com/oidc/v1/token"); - return Stream.of( Arguments.of( "Success response", diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java index 3ca872229..40e549a8e 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java 
@@ -16,10 +16,11 @@ public class ServingDataplaneExample { @Test void testQueryModelEndpoint() { // Use Databricks CLI authentication with required scopes - DatabricksConfig config = new DatabricksConfig() - .setAuthType("databricks-cli") - .setHost("https://e2-dogfood.staging.cloud.databricks.com") - .setDebugHeaders(true); + DatabricksConfig config = + new DatabricksConfig() + .setAuthType("databricks-cli") + .setHost("https://e2-dogfood.staging.cloud.databricks.com") + .setDebugHeaders(true); LOG.info("Creating WorkspaceClient with config: {}", config); WorkspaceClient client = new WorkspaceClient(config); @@ -42,9 +43,7 @@ void testQueryModelEndpoint() { // Create the query input object QueryEndpointInput queryInput = - new QueryEndpointInput() - .setName(endpointName) - .setInputs(new Map[]{inputData}); + new QueryEndpointInput().setName(endpointName).setInputs(new Map[] {inputData}); LOG.info("Querying endpoint {} with input: {}", endpointName, queryInput); From 6199ed982e1e1148b66e13d3252c90b3aefb98f8 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Sat, 24 May 2025 22:23:47 +0000 Subject: [PATCH 12/31] Final version --- .../com/databricks/sdk/core/ApiClient.java | 13 +++---- .../sdk/core/http/RequestModifier.java | 12 ------ .../sdk/core/http/RequestOptions.java | 37 +++++++++++++++++++ .../ServingEndpointsDataPlaneImpl.java | 17 ++++----- 4 files changed, 50 insertions(+), 29 deletions(-) delete mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index 9a89ca974..b14fac537 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -4,7 +4,7 @@ 
import com.databricks.sdk.core.error.PrivateLinkInfo; import com.databricks.sdk.core.http.HttpClient; import com.databricks.sdk.core.http.Request; -import com.databricks.sdk.core.http.RequestModifier; +import com.databricks.sdk.core.http.RequestOptions; import com.databricks.sdk.core.http.Response; import com.databricks.sdk.core.retry.RequestBasedRetryStrategyPicker; import com.databricks.sdk.core.retry.RetryStrategy; @@ -180,7 +180,7 @@ protected O withJavaType(Request request, JavaType javaType) { } } - private Response executeInner(Request in, String path, Optional modifier) { + private Response executeInner(Request in, String path, Optional options) { RetryStrategy retryStrategy = retryStrategyPicker.getRetryStrategy(in); int attemptNumber = 0; while (true) { @@ -205,9 +205,8 @@ private Response executeInner(Request in, String path, Optional } in.withHeader("User-Agent", userAgent); - if (modifier.isPresent()) { - System.out.println("Modifier is present"); - in = modifier.get().modify(in); + if (options.isPresent()) { + in = options.get().applyOptions(in); } // Make the request, catching any exceptions, as we may want to retry. 
@@ -266,7 +265,7 @@ public T execute(Request in, Class target) throws IOException { return deserialize(out, target); } - public T execute(Request in, Class target, RequestModifier modifier) throws IOException { + public T execute(Request in, Class target, RequestOptions modifier) throws IOException { Response out = getResponse(in, modifier); if (target == Void.class) { return null; @@ -278,7 +277,7 @@ private Response getResponse(Request in) { return executeInner(in, in.getUrl(), Optional.empty()); } - private Response getResponse(Request in, RequestModifier modifier) { + private Response getResponse(Request in, RequestOptions modifier) { return executeInner(in, in.getUrl(), Optional.of(modifier)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java deleted file mode 100644 index 922155548..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestModifier.java +++ /dev/null @@ -1,12 +0,0 @@ -package com.databricks.sdk.core.http; - -/** Interface for modifying a request */ -public interface RequestModifier { - /** - * Apply modifications to the request - * - * @param request The request to modify - * @return The modified request - */ - Request modify(Request request); -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java new file mode 100644 index 000000000..a75f4ac63 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java @@ -0,0 +1,37 @@ +package com.databricks.sdk.core.http; + +import java.util.function.Function; + +/** A class that allows modifying HTTP requests by applying transformation functions. 
*/ +public class RequestOptions { + private Function authenticateFunc; + private Function urlFunc; + private Function userAgentFunc; + + public RequestOptions() { + // Default to identity functions + this.authenticateFunc = request -> request; + this.urlFunc = request -> request; + this.userAgentFunc = request -> request; + } + + public RequestOptions withAuthorization(String authorization) { + this.authenticateFunc = request -> request.withHeader("Authorization", authorization); + return this; + } + + public RequestOptions withUrl(String url) { + this.urlFunc = request -> request.withUrl(url); + return this; + } + + public RequestOptions withUserAgent(String userAgent) { + this.userAgentFunc = request -> request.withHeader("User-Agent", userAgent); + return this; + } + + public Request applyOptions(Request request) { + // Apply all transformation functions in sequence + return userAgentFunc.apply(urlFunc.apply(authenticateFunc.apply(request))); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index 5aa529385..8246353b8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -5,6 +5,7 @@ import com.databricks.sdk.core.DatabricksConfig; import com.databricks.sdk.core.DatabricksException; import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.core.http.RequestOptions; import com.databricks.sdk.core.oauth.DataPlaneTokenSource; import com.databricks.sdk.core.oauth.Token; import com.databricks.sdk.support.Generated; @@ -49,22 +50,18 @@ public QueryEndpointResponse query(QueryEndpointInput request) { dataPlaneTokenSource.getToken( dataPlaneInfo.getEndpointUrl(), dataPlaneInfo.getAuthorizationDetails()); - 
System.out.println("DP Token: " + token.getAccessToken()); - try { Request req = new Request("POST", dataPlaneInfo.getEndpointUrl(), apiClient.serialize(request)); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute( - req, - QueryEndpointResponse.class, - r -> { - r.withHeader("Authorization", "Bearer " + token.getAccessToken()); - r.withUrl(dataPlaneInfo.getEndpointUrl()); - return r; - }); + RequestOptions options = + new RequestOptions() + .withAuthorization("Bearer " + token.getAccessToken()) + .withUrl(dataPlaneInfo.getEndpointUrl()); + + return apiClient.execute(req, QueryEndpointResponse.class, options); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } From 98664de34813092cd55d7797bddbdfa68186b34f Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 11:34:14 +0000 Subject: [PATCH 13/31] Add unit tests --- .../sdk/core/http/RequestOptions.java | 37 +++++++- .../sdk/core/oauth/OAuthHeaderFactory.java | 11 +++ .../OAuthHeaderFactoryFromSuppliers.java | 9 +- .../OAuthHeaderFactoryFromTokenSource.java | 8 ++ .../sdk/core/http/RequestOptionsTest.java | 94 +++++++++++++++++++ .../OAuthHeaderFactoryFromSuppliersTest.java | 38 ++++++++ ...OAuthHeaderFactoryFromTokenSourceTest.java | 43 +++++++++ 7 files changed, 237 insertions(+), 3 deletions(-) create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/RequestOptionsTest.java create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java index a75f4ac63..c6258ebd1 100644 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java @@ -2,12 +2,19 @@ import java.util.function.Function; -/** A class that allows modifying HTTP requests by applying transformation functions. */ +/** + * A builder class for configuring HTTP request transformations including authentication, + * URL, and user agent headers. + */ public class RequestOptions { private Function authenticateFunc; private Function urlFunc; private Function userAgentFunc; + /** + * Constructs a new RequestOptions instance with default identity functions. + * Initially, all transformations are set to pass through the request unchanged. + */ public RequestOptions() { // Default to identity functions this.authenticateFunc = request -> request; @@ -15,21 +22,49 @@ public RequestOptions() { this.userAgentFunc = request -> request; } + /** + * Sets the authorization header for the request. + * + * @param authorization The authorization value to be set in the header + * @return This RequestOptions instance for method chaining + */ public RequestOptions withAuthorization(String authorization) { this.authenticateFunc = request -> request.withHeader("Authorization", authorization); return this; } + /** + * Sets the URL for the request. + * + * @param url The URL to be set for the request + * @return This RequestOptions instance for method chaining + */ public RequestOptions withUrl(String url) { this.urlFunc = request -> request.withUrl(url); return this; } + /** + * Sets the User-Agent header for the request. + * + * @param userAgent The user agent string to be set in the header + * @return This RequestOptions instance for method chaining + */ public RequestOptions withUserAgent(String userAgent) { this.userAgentFunc = request -> request.withHeader("User-Agent", userAgent); return this; } + /** + * Applies all configured transformations to the given request. 
+ * The transformations are applied in the following order: + * 1. Authentication + * 2. URL + * 3. User-Agent + * + * @param request The original request to be transformed + * @return A new Request instance with all transformations applied + */ public Request applyOptions(Request request) { // Apply all transformation functions in sequence return userAgentFunc.apply(urlFunc.apply(authenticateFunc.apply(request))); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java index 2d8a64eb2..107952a18 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java @@ -4,10 +4,18 @@ import java.util.Map; import java.util.function.Supplier; +/** + * Factory interface for creating OAuth authentication headers. + * This interface combines the functionality of {@link HeaderFactory} and {@link TokenSource}. + */ public interface OAuthHeaderFactory extends HeaderFactory, TokenSource { /** * Creates an OAuthHeaderFactory from separate token and header suppliers. This allows for custom * header generation beyond just the Authorization header. + * + * @param tokenSupplier A supplier that provides OAuth tokens + * @param headerSupplier A supplier that provides a map of header name-value pairs + * @return A new OAuthHeaderFactory instance that uses the provided suppliers */ static OAuthHeaderFactory fromSuppliers( Supplier tokenSupplier, Supplier> headerSupplier) { @@ -17,6 +25,9 @@ static OAuthHeaderFactory fromSuppliers( /** * Creates an OAuthHeaderFactory from a TokenSource. This is a convenience method for the common * case where headers are derived from the token. 
+ * + * @param tokenSource The source of OAuth tokens + * @return A new OAuthHeaderFactory instance that uses the provided token source */ static OAuthHeaderFactory fromTokenSource(TokenSource tokenSource) { return new OAuthHeaderFactoryFromTokenSource(tokenSource); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java index 46c70ec77..9ff442517 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java @@ -4,13 +4,18 @@ import java.util.function.Supplier; /** - * A concrete implementation of OAuthHeaderFactory that uses suppliers for both token and header - * generation. This allows for custom header generation beyond just the Authorization header. + * Implementation of {@link OAuthHeaderFactory} that uses separate suppliers for token and header generation. */ public class OAuthHeaderFactoryFromSuppliers implements OAuthHeaderFactory { private final Supplier tokenSupplier; private final Supplier> headerSupplier; + /** + * Creates a new instance with the specified token and header suppliers. + * + * @param tokenSupplier Supplier for OAuth tokens. + * @param headerSupplier Supplier for headers. 
+ */ public OAuthHeaderFactoryFromSuppliers( Supplier tokenSupplier, Supplier> headerSupplier) { this.tokenSupplier = tokenSupplier; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java index 7a9416edf..8ee91ec75 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java @@ -3,9 +3,17 @@ import java.util.HashMap; import java.util.Map; +/** + * Implementation of {@link OAuthHeaderFactory} that generates Authorization headers from a token source. + */ public class OAuthHeaderFactoryFromTokenSource implements OAuthHeaderFactory { private final TokenSource tokenSource; + /** + * Creates a new instance with the specified token source. + * + * @param tokenSource Source of OAuth tokens. 
+ */ public OAuthHeaderFactoryFromTokenSource(TokenSource tokenSource) { this.tokenSource = tokenSource; } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/RequestOptionsTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/RequestOptionsTest.java new file mode 100644 index 000000000..db675cac6 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/RequestOptionsTest.java @@ -0,0 +1,94 @@ +package com.databricks.sdk.core.http; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import static org.junit.jupiter.api.Assertions.*; +import java.util.stream.Stream; + +public class RequestOptionsTest { + private static final String DEFAULT_METHOD = "GET"; + private static final String DEFAULT_URL = "https://example.com"; + private static final String DEFAULT_AUTH = "Bearer token123"; + private static final String DEFAULT_USER_AGENT = "TestAgent/1.0"; + private static final String NEW_URL = "https://new-url.com/api/v1"; + private static final String NEW_AUTH = "Bearer token456"; + private static final String NEW_USER_AGENT = "NewAgent/1.0"; + + private static Request createDefaultRequest() { + return new Request(DEFAULT_METHOD, DEFAULT_URL) + .withHeader("Authorization", DEFAULT_AUTH) + .withHeader("User-Agent", DEFAULT_USER_AGENT); + } + + private static Stream provideTestCases() { + return Stream.of( + // Default constructor test + Arguments.of( + "Default constructor should not modify request", + new RequestOptions(), + DEFAULT_AUTH, + DEFAULT_URL, + DEFAULT_USER_AGENT + ), + // Authorization header test + Arguments.of( + "Authorization header should be updated", + new RequestOptions().withAuthorization(NEW_AUTH), + NEW_AUTH, + DEFAULT_URL, + DEFAULT_USER_AGENT + ), + // URL test + Arguments.of( + "URL should be updated", + new RequestOptions().withUrl(NEW_URL), + DEFAULT_AUTH, + 
NEW_URL, + DEFAULT_USER_AGENT + ), + // User-Agent test + Arguments.of( + "User-Agent header should be updated", + new RequestOptions().withUserAgent(NEW_USER_AGENT), + DEFAULT_AUTH, + DEFAULT_URL, + NEW_USER_AGENT + ), + // Multiple options test + Arguments.of( + "Multiple options should be applied", + new RequestOptions() + .withAuthorization(NEW_AUTH) + .withUrl(NEW_URL) + .withUserAgent(NEW_USER_AGENT), + NEW_AUTH, + NEW_URL, + NEW_USER_AGENT + ) + ); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("provideTestCases") + public void testRequestOptions( + String testName, + RequestOptions options, + String expectedAuth, + String expectedUrl, + String expectedUserAgent) { + + Request originalRequest = createDefaultRequest(); + Request result = options.applyOptions(originalRequest); + + // Verify method is unchanged + assertEquals(DEFAULT_METHOD, result.getMethod()); + + // Verify URL + assertEquals(expectedUrl, result.getUrl()); + + // Verify headers + assertEquals(expectedAuth, result.getHeaders().get("Authorization")); + assertEquals(expectedUserAgent, result.getHeaders().get("User-Agent")); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java new file mode 100644 index 000000000..b403df2bc --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java @@ -0,0 +1,38 @@ +package com.databricks.sdk.core.oauth; + +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import java.util.Map; +import java.util.HashMap; +import java.util.function.Supplier; +import java.time.LocalDateTime; + +public class OAuthHeaderFactoryFromSuppliersTest { + + private static final String TOKEN_TYPE = "Bearer"; + private static final String TOKEN_VALUE = "test-token"; + + @Test + public void 
testTokenAndHeaders() { + Map expectedHeaders = new HashMap<>(); + expectedHeaders.put("Authorization", TOKEN_TYPE + " " + TOKEN_VALUE); + expectedHeaders.put("Content-Type", "application/json"); + + Supplier tokenSupplier = () -> new Token(TOKEN_VALUE, TOKEN_TYPE, LocalDateTime.now().plusHours(1)); + Supplier> headerSupplier = () -> new HashMap<>(expectedHeaders); + + OAuthHeaderFactoryFromSuppliers factory = new OAuthHeaderFactoryFromSuppliers( + tokenSupplier, headerSupplier); + + Token actualToken = factory.getToken(); + assertEquals(TOKEN_VALUE, actualToken.getAccessToken()); + assertEquals(TOKEN_TYPE, actualToken.getTokenType()); + + Map actualHeaders = factory.headers(); + assertEquals(expectedHeaders.size(), actualHeaders.size(), "Header maps should have same size"); + expectedHeaders.forEach((key, value) -> + assertEquals(value, actualHeaders.get(key), + String.format("Header '%s' should have value '%s'", key, value))); + } +} + diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java new file mode 100644 index 000000000..102a84709 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java @@ -0,0 +1,43 @@ +package com.databricks.sdk.core.oauth; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.time.LocalDateTime; +import java.util.Map; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +@ExtendWith(MockitoExtension.class) +public class OAuthHeaderFactoryFromTokenSourceTest { + + private static final String TOKEN_TYPE = "Bearer"; + private static final String TOKEN_VALUE = "test-token"; + + @Mock + private TokenSource tokenSource; + + @Test + public 
void testTokenAndHeaders() { + LocalDateTime expiry = LocalDateTime.now().plusHours(1); + Token token = new Token(TOKEN_VALUE, TOKEN_TYPE, expiry); + when(tokenSource.getToken()).thenReturn(token); + OAuthHeaderFactoryFromTokenSource factory = new OAuthHeaderFactoryFromTokenSource(tokenSource); + + Token actualToken = factory.getToken(); + assertEquals(TOKEN_VALUE, actualToken.getAccessToken()); + assertEquals(TOKEN_TYPE, actualToken.getTokenType()); + + Map headers = factory.headers(); + assertNotNull(headers); + assertEquals(1, headers.size()); + assertEquals(TOKEN_TYPE + " " + TOKEN_VALUE, headers.get("Authorization")); + + // Verify token source was called exactly twice (once for getToken, once for headers) + verify(tokenSource, times(2)).getToken(); + } +} + From d59b79866c73773099a5b03b7ef2cf0b12abd418 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 12:12:06 +0000 Subject: [PATCH 14/31] Fix formatting --- .../com/databricks/sdk/core/ApiClient.java | 21 ++- .../sdk/core/http/RequestOptions.java | 15 +- .../sdk/core/oauth/OAuthHeaderFactory.java | 4 +- .../OAuthHeaderFactoryFromSuppliers.java | 3 +- .../OAuthHeaderFactoryFromTokenSource.java | 3 +- .../sdk/core/http/RequestOptionsTest.java | 167 +++++++++--------- .../OAuthHeaderFactoryFromSuppliersTest.java | 62 ++++--- ...OAuthHeaderFactoryFromTokenSourceTest.java | 63 ++++--- 8 files changed, 170 insertions(+), 168 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index b14fac537..63812b848 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -249,12 +249,10 @@ private Response executeInner(Request in, String path, Optional } /** - * Executes HTTP request with retries and converts it to proper POJO, using custom request - * modifier + * Executes HTTP 
request with retries and converts it to proper POJO. * * @param in Commons HTTP request * @param target Expected pojo type - * @param modifier Optional request modifier to customize request behavior * @return POJO of requested type */ public T execute(Request in, Class target) throws IOException { @@ -265,8 +263,17 @@ public T execute(Request in, Class target) throws IOException { return deserialize(out, target); } - public T execute(Request in, Class target, RequestOptions modifier) throws IOException { - Response out = getResponse(in, modifier); + /** + * Executes HTTP request with retries and converts it to proper POJO, using custom request + * options. + * + * @param in Commons HTTP request + * @param target Expected pojo type + * @param options Optional request options to customize request behavior + * @return POJO of requested type + */ + public T execute(Request in, Class target, RequestOptions options) throws IOException { + Response out = getResponse(in, options); if (target == Void.class) { return null; } @@ -277,8 +284,8 @@ private Response getResponse(Request in) { return executeInner(in, in.getUrl(), Optional.empty()); } - private Response getResponse(Request in, RequestOptions modifier) { - return executeInner(in, in.getUrl(), Optional.of(modifier)); + private Response getResponse(Request in, RequestOptions options) { + return executeInner(in, in.getUrl(), Optional.of(options)); } private boolean isRequestSuccessful(Response response, Exception e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java index c6258ebd1..eb1a015a6 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java @@ -3,8 +3,8 @@ import java.util.function.Function; /** - * A builder class for configuring HTTP request 
transformations including authentication, - * URL, and user agent headers. + * A builder class for configuring HTTP request transformations including authentication, URL, and + * user agent headers. */ public class RequestOptions { private Function authenticateFunc; @@ -12,8 +12,8 @@ public class RequestOptions { private Function userAgentFunc; /** - * Constructs a new RequestOptions instance with default identity functions. - * Initially, all transformations are set to pass through the request unchanged. + * Constructs a new RequestOptions instance with default identity functions. Initially, all + * transformations are set to pass through the request unchanged. */ public RequestOptions() { // Default to identity functions @@ -56,11 +56,8 @@ public RequestOptions withUserAgent(String userAgent) { } /** - * Applies all configured transformations to the given request. - * The transformations are applied in the following order: - * 1. Authentication - * 2. URL - * 3. User-Agent + * Applies all configured transformations to the given request. The transformations are applied in + * the following order: 1. Authentication 2. URL 3. User-Agent * * @param request The original request to be transformed * @return A new Request instance with all transformations applied diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java index 107952a18..13d65df4f 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java @@ -5,8 +5,8 @@ import java.util.function.Supplier; /** - * Factory interface for creating OAuth authentication headers. - * This interface combines the functionality of {@link HeaderFactory} and {@link TokenSource}. + * Factory interface for creating OAuth authentication headers. 
This interface combines the + * functionality of {@link HeaderFactory} and {@link TokenSource}. */ public interface OAuthHeaderFactory extends HeaderFactory, TokenSource { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java index 9ff442517..e1d6e9c48 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java @@ -4,7 +4,8 @@ import java.util.function.Supplier; /** - * Implementation of {@link OAuthHeaderFactory} that uses separate suppliers for token and header generation. + * Implementation of {@link OAuthHeaderFactory} that uses separate suppliers for token and header + * generation. */ public class OAuthHeaderFactoryFromSuppliers implements OAuthHeaderFactory { private final Supplier tokenSupplier; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java index 8ee91ec75..e58a6bbe9 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java @@ -4,7 +4,8 @@ import java.util.Map; /** - * Implementation of {@link OAuthHeaderFactory} that generates Authorization headers from a token source. + * Implementation of {@link OAuthHeaderFactory} that generates Authorization headers from a token + * source. 
*/ public class OAuthHeaderFactoryFromTokenSource implements OAuthHeaderFactory { private final TokenSource tokenSource; diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/RequestOptionsTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/RequestOptionsTest.java index db675cac6..2408dbc99 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/RequestOptionsTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/RequestOptionsTest.java @@ -1,94 +1,89 @@ package com.databricks.sdk.core.http; +import static org.junit.jupiter.api.Assertions.*; + +import java.util.stream.Stream; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import static org.junit.jupiter.api.Assertions.*; -import java.util.stream.Stream; public class RequestOptionsTest { - private static final String DEFAULT_METHOD = "GET"; - private static final String DEFAULT_URL = "https://example.com"; - private static final String DEFAULT_AUTH = "Bearer token123"; - private static final String DEFAULT_USER_AGENT = "TestAgent/1.0"; - private static final String NEW_URL = "https://new-url.com/api/v1"; - private static final String NEW_AUTH = "Bearer token456"; - private static final String NEW_USER_AGENT = "NewAgent/1.0"; - - private static Request createDefaultRequest() { - return new Request(DEFAULT_METHOD, DEFAULT_URL) - .withHeader("Authorization", DEFAULT_AUTH) - .withHeader("User-Agent", DEFAULT_USER_AGENT); - } - - private static Stream provideTestCases() { - return Stream.of( - // Default constructor test - Arguments.of( - "Default constructor should not modify request", - new RequestOptions(), - DEFAULT_AUTH, - DEFAULT_URL, - DEFAULT_USER_AGENT - ), - // Authorization header test - Arguments.of( - "Authorization header should be updated", - new RequestOptions().withAuthorization(NEW_AUTH), - NEW_AUTH, - 
DEFAULT_URL, - DEFAULT_USER_AGENT - ), - // URL test - Arguments.of( - "URL should be updated", - new RequestOptions().withUrl(NEW_URL), - DEFAULT_AUTH, - NEW_URL, - DEFAULT_USER_AGENT - ), - // User-Agent test - Arguments.of( - "User-Agent header should be updated", - new RequestOptions().withUserAgent(NEW_USER_AGENT), - DEFAULT_AUTH, - DEFAULT_URL, - NEW_USER_AGENT - ), - // Multiple options test - Arguments.of( - "Multiple options should be applied", - new RequestOptions() - .withAuthorization(NEW_AUTH) - .withUrl(NEW_URL) - .withUserAgent(NEW_USER_AGENT), - NEW_AUTH, - NEW_URL, - NEW_USER_AGENT - ) - ); - } - - @ParameterizedTest(name = "{0}") - @MethodSource("provideTestCases") - public void testRequestOptions( - String testName, - RequestOptions options, - String expectedAuth, - String expectedUrl, - String expectedUserAgent) { - - Request originalRequest = createDefaultRequest(); - Request result = options.applyOptions(originalRequest); - - // Verify method is unchanged - assertEquals(DEFAULT_METHOD, result.getMethod()); - - // Verify URL - assertEquals(expectedUrl, result.getUrl()); - - // Verify headers - assertEquals(expectedAuth, result.getHeaders().get("Authorization")); - assertEquals(expectedUserAgent, result.getHeaders().get("User-Agent")); - } + private static final String DEFAULT_METHOD = "GET"; + private static final String DEFAULT_URL = "https://example.com"; + private static final String DEFAULT_AUTH = "Bearer token123"; + private static final String DEFAULT_USER_AGENT = "TestAgent/1.0"; + private static final String NEW_URL = "https://new-url.com/api/v1"; + private static final String NEW_AUTH = "Bearer token456"; + private static final String NEW_USER_AGENT = "NewAgent/1.0"; + + private static Request createDefaultRequest() { + return new Request(DEFAULT_METHOD, DEFAULT_URL) + .withHeader("Authorization", DEFAULT_AUTH) + .withHeader("User-Agent", DEFAULT_USER_AGENT); + } + + private static Stream provideTestCases() { + return Stream.of( + // 
Default constructor test + Arguments.of( + "Default constructor should not modify request", + new RequestOptions(), + DEFAULT_AUTH, + DEFAULT_URL, + DEFAULT_USER_AGENT), + // Authorization header test + Arguments.of( + "Authorization header should be updated", + new RequestOptions().withAuthorization(NEW_AUTH), + NEW_AUTH, + DEFAULT_URL, + DEFAULT_USER_AGENT), + // URL test + Arguments.of( + "URL should be updated", + new RequestOptions().withUrl(NEW_URL), + DEFAULT_AUTH, + NEW_URL, + DEFAULT_USER_AGENT), + // User-Agent test + Arguments.of( + "User-Agent header should be updated", + new RequestOptions().withUserAgent(NEW_USER_AGENT), + DEFAULT_AUTH, + DEFAULT_URL, + NEW_USER_AGENT), + // Multiple options test + Arguments.of( + "Multiple options should be applied", + new RequestOptions() + .withAuthorization(NEW_AUTH) + .withUrl(NEW_URL) + .withUserAgent(NEW_USER_AGENT), + NEW_AUTH, + NEW_URL, + NEW_USER_AGENT)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("provideTestCases") + public void testRequestOptions( + String testName, + RequestOptions options, + String expectedAuth, + String expectedUrl, + String expectedUserAgent) { + + Request originalRequest = createDefaultRequest(); + Request result = options.applyOptions(originalRequest); + + // Verify method is unchanged + assertEquals(DEFAULT_METHOD, result.getMethod()); + + // Verify URL + assertEquals(expectedUrl, result.getUrl()); + + // Verify headers + assertEquals(expectedAuth, result.getHeaders().get("Authorization")); + assertEquals(expectedUserAgent, result.getHeaders().get("User-Agent")); + } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java index b403df2bc..18a91f628 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java +++ 
b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java @@ -1,38 +1,42 @@ package com.databricks.sdk.core.oauth; -import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; -import java.util.Map; + +import java.time.LocalDateTime; import java.util.HashMap; +import java.util.Map; import java.util.function.Supplier; -import java.time.LocalDateTime; +import org.junit.jupiter.api.Test; public class OAuthHeaderFactoryFromSuppliersTest { - - private static final String TOKEN_TYPE = "Bearer"; - private static final String TOKEN_VALUE = "test-token"; - - @Test - public void testTokenAndHeaders() { - Map expectedHeaders = new HashMap<>(); - expectedHeaders.put("Authorization", TOKEN_TYPE + " " + TOKEN_VALUE); - expectedHeaders.put("Content-Type", "application/json"); - - Supplier tokenSupplier = () -> new Token(TOKEN_VALUE, TOKEN_TYPE, LocalDateTime.now().plusHours(1)); - Supplier> headerSupplier = () -> new HashMap<>(expectedHeaders); - - OAuthHeaderFactoryFromSuppliers factory = new OAuthHeaderFactoryFromSuppliers( - tokenSupplier, headerSupplier); - - Token actualToken = factory.getToken(); - assertEquals(TOKEN_VALUE, actualToken.getAccessToken()); - assertEquals(TOKEN_TYPE, actualToken.getTokenType()); - - Map actualHeaders = factory.headers(); - assertEquals(expectedHeaders.size(), actualHeaders.size(), "Header maps should have same size"); - expectedHeaders.forEach((key, value) -> - assertEquals(value, actualHeaders.get(key), + + private static final String TOKEN_TYPE = "Bearer"; + private static final String TOKEN_VALUE = "test-token"; + + @Test + public void testTokenAndHeaders() { + Map expectedHeaders = new HashMap<>(); + expectedHeaders.put("Authorization", TOKEN_TYPE + " " + TOKEN_VALUE); + expectedHeaders.put("Content-Type", "application/json"); + + Supplier tokenSupplier = + () -> new Token(TOKEN_VALUE, TOKEN_TYPE, LocalDateTime.now().plusHours(1)); + Supplier> headerSupplier = () -> 
new HashMap<>(expectedHeaders); + + OAuthHeaderFactoryFromSuppliers factory = + new OAuthHeaderFactoryFromSuppliers(tokenSupplier, headerSupplier); + + Token actualToken = factory.getToken(); + assertEquals(TOKEN_VALUE, actualToken.getAccessToken()); + assertEquals(TOKEN_TYPE, actualToken.getTokenType()); + + Map actualHeaders = factory.headers(); + assertEquals(expectedHeaders.size(), actualHeaders.size(), "Header maps should have same size"); + expectedHeaders.forEach( + (key, value) -> + assertEquals( + value, + actualHeaders.get(key), String.format("Header '%s' should have value '%s'", key, value))); - } + } } - diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java index 102a84709..91b392520 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java @@ -1,43 +1,40 @@ package com.databricks.sdk.core.oauth; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.time.LocalDateTime; +import java.util.Map; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import java.time.LocalDateTime; -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; - @ExtendWith(MockitoExtension.class) public class OAuthHeaderFactoryFromTokenSourceTest { - - private static final String TOKEN_TYPE = "Bearer"; - private static final String TOKEN_VALUE = "test-token"; - - @Mock - private TokenSource tokenSource; - - @Test - public void testTokenAndHeaders() { - LocalDateTime expiry = LocalDateTime.now().plusHours(1); - Token token = 
new Token(TOKEN_VALUE, TOKEN_TYPE, expiry); - when(tokenSource.getToken()).thenReturn(token); - OAuthHeaderFactoryFromTokenSource factory = new OAuthHeaderFactoryFromTokenSource(tokenSource); - - Token actualToken = factory.getToken(); - assertEquals(TOKEN_VALUE, actualToken.getAccessToken()); - assertEquals(TOKEN_TYPE, actualToken.getTokenType()); - - Map headers = factory.headers(); - assertNotNull(headers); - assertEquals(1, headers.size()); - assertEquals(TOKEN_TYPE + " " + TOKEN_VALUE, headers.get("Authorization")); - - // Verify token source was called exactly twice (once for getToken, once for headers) - verify(tokenSource, times(2)).getToken(); - } -} + private static final String TOKEN_TYPE = "Bearer"; + private static final String TOKEN_VALUE = "test-token"; + + @Mock private TokenSource tokenSource; + + @Test + public void testTokenAndHeaders() { + LocalDateTime expiry = LocalDateTime.now().plusHours(1); + Token token = new Token(TOKEN_VALUE, TOKEN_TYPE, expiry); + when(tokenSource.getToken()).thenReturn(token); + OAuthHeaderFactoryFromTokenSource factory = new OAuthHeaderFactoryFromTokenSource(tokenSource); + + Token actualToken = factory.getToken(); + assertEquals(TOKEN_VALUE, actualToken.getAccessToken()); + assertEquals(TOKEN_TYPE, actualToken.getTokenType()); + + Map headers = factory.headers(); + assertNotNull(headers); + assertEquals(1, headers.size()); + assertEquals(TOKEN_TYPE + " " + TOKEN_VALUE, headers.get("Authorization")); + + // Verify token source was called exactly twice (once for getToken, once for headers) + verify(tokenSource, times(2)).getToken(); + } +} From cd7d6d57df1cb0e561f1a3f8124aa994cc100fdd Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 12:41:39 +0000 Subject: [PATCH 15/31] Direct dataplane access with code generation --- .../com/databricks/sdk/WorkspaceClient.java | 165 ++++++++++++++++-- .../serving/ServingEndpointsDataPlaneAPI.java | 1 - .../ServingEndpointsDataPlaneImpl.java | 8 +- 3 files changed, 155 
insertions(+), 19 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 31eadb91b..f36b6ff47 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -140,8 +140,11 @@ import com.databricks.sdk.service.ml.ModelRegistryService; import com.databricks.sdk.service.pipelines.PipelinesAPI; import com.databricks.sdk.service.pipelines.PipelinesService; +import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorsV2API; +import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorsV2Service; import com.databricks.sdk.service.serving.ServingEndpointsAPI; import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneAPI; +import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneService; import com.databricks.sdk.service.serving.ServingEndpointsService; import com.databricks.sdk.service.settings.CredentialsManagerAPI; import com.databricks.sdk.service.settings.CredentialsManagerService; @@ -281,6 +284,7 @@ public class WorkspaceClient { private ProviderProvidersAPI providerProvidersAPI; private ProvidersAPI providersAPI; private QualityMonitorsAPI qualityMonitorsAPI; + private QualityMonitorsV2API qualityMonitorsV2API; private QueriesAPI queriesAPI; private QueriesLegacyAPI queriesLegacyAPI; private QueryExecutionAPI queryExecutionAPI; @@ -328,107 +332,210 @@ public WorkspaceClient(DatabricksConfig config) { apiClient = new ApiClient(config); accessControlAPI = new AccessControlAPI(apiClient); + accountAccessControlProxyAPI = new AccountAccessControlProxyAPI(apiClient); + alertsAPI = new AlertsAPI(apiClient); + alertsLegacyAPI = new AlertsLegacyAPI(apiClient); + alertsV2API = new AlertsV2API(apiClient); + appsAPI = new AppsAPI(apiClient); + artifactAllowlistsAPI = new ArtifactAllowlistsAPI(apiClient); + catalogsAPI 
= new CatalogsAPI(apiClient); + cleanRoomAssetsAPI = new CleanRoomAssetsAPI(apiClient); + cleanRoomTaskRunsAPI = new CleanRoomTaskRunsAPI(apiClient); + cleanRoomsAPI = new CleanRoomsAPI(apiClient); + clusterPoliciesAPI = new ClusterPoliciesAPI(apiClient); + clustersAPI = new ClustersExt(apiClient); + commandExecutionAPI = new CommandExecutionAPI(apiClient); + connectionsAPI = new ConnectionsAPI(apiClient); + consumerFulfillmentsAPI = new ConsumerFulfillmentsAPI(apiClient); + consumerInstallationsAPI = new ConsumerInstallationsAPI(apiClient); + consumerListingsAPI = new ConsumerListingsAPI(apiClient); + consumerPersonalizationRequestsAPI = new ConsumerPersonalizationRequestsAPI(apiClient); + consumerProvidersAPI = new ConsumerProvidersAPI(apiClient); + credentialsAPI = new CredentialsAPI(apiClient); + credentialsManagerAPI = new CredentialsManagerAPI(apiClient); + currentUserAPI = new CurrentUserAPI(apiClient); + dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); + dashboardsAPI = new DashboardsAPI(apiClient); + dataSourcesAPI = new DataSourcesAPI(apiClient); + databaseInstancesAPI = new DatabaseInstancesAPI(apiClient); + dbfsAPI = new DbfsExt(apiClient); + dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); + experimentsAPI = new ExperimentsAPI(apiClient); + externalLocationsAPI = new ExternalLocationsAPI(apiClient); + filesAPI = new FilesAPI(apiClient); + functionsAPI = new FunctionsAPI(apiClient); + genieAPI = new GenieAPI(apiClient); + gitCredentialsAPI = new GitCredentialsAPI(apiClient); + globalInitScriptsAPI = new GlobalInitScriptsAPI(apiClient); + grantsAPI = new GrantsAPI(apiClient); + groupsAPI = new GroupsAPI(apiClient); + instancePoolsAPI = new InstancePoolsAPI(apiClient); + instanceProfilesAPI = new InstanceProfilesAPI(apiClient); + ipAccessListsAPI = new IpAccessListsAPI(apiClient); + jobsAPI = new JobsAPI(apiClient); + lakeviewAPI = new LakeviewAPI(apiClient); + lakeviewEmbeddedAPI = new LakeviewEmbeddedAPI(apiClient); + librariesAPI = 
new LibrariesAPI(apiClient); + metastoresAPI = new MetastoresAPI(apiClient); + modelRegistryAPI = new ModelRegistryAPI(apiClient); + modelVersionsAPI = new ModelVersionsAPI(apiClient); + notificationDestinationsAPI = new NotificationDestinationsAPI(apiClient); + onlineTablesAPI = new OnlineTablesAPI(apiClient); + permissionMigrationAPI = new PermissionMigrationAPI(apiClient); + permissionsAPI = new PermissionsAPI(apiClient); + pipelinesAPI = new PipelinesAPI(apiClient); + policyComplianceForClustersAPI = new PolicyComplianceForClustersAPI(apiClient); + policyComplianceForJobsAPI = new PolicyComplianceForJobsAPI(apiClient); + policyFamiliesAPI = new PolicyFamiliesAPI(apiClient); + providerExchangeFiltersAPI = new ProviderExchangeFiltersAPI(apiClient); + providerExchangesAPI = new ProviderExchangesAPI(apiClient); + providerFilesAPI = new ProviderFilesAPI(apiClient); + providerListingsAPI = new ProviderListingsAPI(apiClient); + providerPersonalizationRequestsAPI = new ProviderPersonalizationRequestsAPI(apiClient); + providerProviderAnalyticsDashboardsAPI = new ProviderProviderAnalyticsDashboardsAPI(apiClient); + providerProvidersAPI = new ProviderProvidersAPI(apiClient); + providersAPI = new ProvidersAPI(apiClient); + qualityMonitorsAPI = new QualityMonitorsAPI(apiClient); + + qualityMonitorsV2API = new QualityMonitorsV2API(apiClient); + queriesAPI = new QueriesAPI(apiClient); + queriesLegacyAPI = new QueriesLegacyAPI(apiClient); + queryExecutionAPI = new QueryExecutionAPI(apiClient); + queryHistoryAPI = new QueryHistoryAPI(apiClient); + queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient); + queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient); + recipientActivationAPI = new RecipientActivationAPI(apiClient); + recipientFederationPoliciesAPI = new RecipientFederationPoliciesAPI(apiClient); + recipientsAPI = new RecipientsAPI(apiClient); + redashConfigAPI = new RedashConfigAPI(apiClient); + registeredModelsAPI = new 
RegisteredModelsAPI(apiClient); + reposAPI = new ReposAPI(apiClient); + resourceQuotasAPI = new ResourceQuotasAPI(apiClient); + schemasAPI = new SchemasAPI(apiClient); + secretsAPI = new SecretsExt(apiClient); + servicePrincipalsAPI = new ServicePrincipalsAPI(apiClient); + servingEndpointsAPI = new ServingEndpointsAPI(apiClient); + servingEndpointsDataPlaneAPI = new ServingEndpointsDataPlaneAPI(apiClient, config, servingEndpointsAPI); + settingsAPI = new SettingsAPI(apiClient); + sharesAPI = new SharesAPI(apiClient); + statementExecutionAPI = new StatementExecutionAPI(apiClient); + storageCredentialsAPI = new StorageCredentialsAPI(apiClient); + systemSchemasAPI = new SystemSchemasAPI(apiClient); + tableConstraintsAPI = new TableConstraintsAPI(apiClient); + tablesAPI = new TablesAPI(apiClient); + temporaryTableCredentialsAPI = new TemporaryTableCredentialsAPI(apiClient); + tokenManagementAPI = new TokenManagementAPI(apiClient); + tokensAPI = new TokensAPI(apiClient); + usersAPI = new UsersAPI(apiClient); + vectorSearchEndpointsAPI = new VectorSearchEndpointsAPI(apiClient); + vectorSearchIndexesAPI = new VectorSearchIndexesAPI(apiClient); + volumesAPI = new VolumesAPI(apiClient); + warehousesAPI = new WarehousesAPI(apiClient); + workspaceAPI = new WorkspaceAPI(apiClient); + workspaceBindingsAPI = new WorkspaceBindingsAPI(apiClient); + workspaceConfAPI = new WorkspaceConfAPI(apiClient); + forecastingAPI = new ForecastingAPI(apiClient); } @@ -489,7 +596,7 @@ public AlertsV2API alertsV2() { } /** - * Apps run directly on a customer's Databricks instance, integrate with their data, use and + * Apps run directly on a customer’s Databricks instance, integrate with their data, use and * extend Databricks services, and enable users to interact through single sign-on. */ public AppsAPI apps() { @@ -505,7 +612,7 @@ public ArtifactAllowlistsAPI artifactAllowlists() { } /** - * A catalog is the first layer of Unity Catalog's three-level namespace. 
It's used to organize + * A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize * your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG * data permission. * @@ -533,7 +640,7 @@ public CleanRoomTaskRunsAPI cleanRoomTaskRuns() { /** * A clean room uses Delta Sharing and serverless compute to provide a secure and * privacy-protecting environment where multiple parties can work together on sensitive enterprise - * data without direct access to each other's data. + * data without direct access to each other’s data. */ public CleanRoomsAPI cleanRooms() { return cleanRoomsAPI; @@ -766,7 +873,7 @@ public ExperimentsAPI experiments() { * that authorizes access to the cloud storage path. Each external location is subject to Unity * Catalog access-control policies that control which users and groups can access the credential. * If a user does not have access to an external location in Unity Catalog, the request fails and - * Unity Catalog does not attempt to authenticate to your cloud tenant on the user's behalf. + * Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. * *

Databricks recommends using external locations rather than using storage credentials * directly. @@ -883,10 +990,10 @@ public GroupsAPI groups() { * *

Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, * ready-to-use instances. When a cluster is attached to a pool, cluster nodes are created using - * the pool's idle instances. If the pool has no idle instances, the pool expands by allocating a - * new instance from the instance provider in order to accommodate the cluster's request. When a + * the pool’s idle instances. If the pool has no idle instances, the pool expands by allocating a + * new instance from the instance provider in order to accommodate the cluster’s request. When a * cluster releases an instance, it returns to the pool and is free for another cluster to use. - * Only clusters attached to a pool can use that pool's idle instances. + * Only clusters attached to a pool can use that pool’s idle instances. * *

You can specify a different pool for the driver node and worker nodes, or use the same pool * for both. @@ -1225,6 +1332,11 @@ public QualityMonitorsAPI qualityMonitors() { return qualityMonitorsAPI; } + /** Manage data quality of UC objects (currently support `schema`) */ + public QualityMonitorsV2API qualityMonitorsV2() { + return qualityMonitorsV2API; + } + /** * The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL * object that includes the target SQL warehouse, query text, name, description, tags, and @@ -1355,7 +1467,7 @@ public RedashConfigAPI redashConfig() { * Catalog provide centralized access control, auditing, lineage, and discovery of ML models * across Databricks workspaces. * - *

An MLflow registered model resides in the third layer of Unity Catalog's three-level + *

An MLflow registered model resides in the third layer of Unity Catalog’s three-level * namespace. Registered models contain model versions, which correspond to actual ML models * (MLflow models). Creating new model versions currently requires use of the MLflow Python * client. Once model versions are created, you can load them for batch inference using MLflow @@ -1411,7 +1523,7 @@ public ResourceQuotasAPI resourceQuotas() { } /** - * A schema (also called a database) is the second layer of Unity Catalog's three-level namespace. + * A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. * A schema organizes tables, views and functions. To access (or list) a table or view in a * schema, users must have the USE_SCHEMA data permission on the schema and its parent catalog, * and they must have the SELECT permission on the table or view. @@ -1463,8 +1575,8 @@ public ServingEndpointsAPI servingEndpoints() { } /** - * The Serving Endpoints Data Plane API allows you to create, update, and delete model serving - * endpoints. + * Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints + * for Serving endpoints service. */ public ServingEndpointsDataPlaneAPI servingEndpointsDataPlane() { return servingEndpointsDataPlaneAPI; @@ -1586,7 +1698,7 @@ public StatementExecutionAPI statementExecution() { * data stored on your cloud tenant. Each storage credential is subject to Unity Catalog * access-control policies that control which users and groups can access the credential. If a * user does not have access to a storage credential in Unity Catalog, the request fails and Unity - * Catalog does not attempt to authenticate to your cloud tenant on the user's behalf. + * Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. * *
<p>
Databricks recommends using external locations rather than using storage credentials * directly. @@ -1625,7 +1737,7 @@ public TableConstraintsAPI tableConstraints() { } /** - * A table resides in the third layer of Unity Catalog's three-level namespace. It contains rows + * A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows * of data. To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the * schema, and they must have the USE_CATALOG permission on its parent catalog. To query a table, * users must have the SELECT permission on the table, and they must have the USE_CATALOG @@ -1681,7 +1793,7 @@ public TokensAPI tokens() { * or team by using your identity provider to create users and groups in Databricks workspace and * give them the proper level of access. When a user leaves your organization or no longer needs * access to Databricks workspace, admins can terminate the user in your identity provider and - * that user's account will also be removed from Databricks workspace. This ensures a consistent + * that user’s account will also be removed from Databricks workspace. This ensures a consistent * offboarding process and prevents unauthorized users from accessing sensitive data. */ public UsersAPI users() { @@ -2518,6 +2630,17 @@ public WorkspaceClient withQualityMonitorsAPI(QualityMonitorsAPI qualityMonitors return this; } + /** Replace the default QualityMonitorsV2Service with a custom implementation. */ + public WorkspaceClient withQualityMonitorsV2Impl(QualityMonitorsV2Service qualityMonitorsV2) { + return this.withQualityMonitorsV2API(new QualityMonitorsV2API(qualityMonitorsV2)); + } + + /** Replace the default QualityMonitorsV2API with a custom implementation. */ + public WorkspaceClient withQualityMonitorsV2API(QualityMonitorsV2API qualityMonitorsV2) { + this.qualityMonitorsV2API = qualityMonitorsV2; + return this; + } + /** Replace the default QueriesService with a custom implementation. 
*/ public WorkspaceClient withQueriesImpl(QueriesService queries) { return this.withQueriesAPI(new QueriesAPI(queries)); @@ -2713,6 +2836,20 @@ public WorkspaceClient withServingEndpointsAPI(ServingEndpointsAPI servingEndpoi return this; } + /** Replace the default ServingEndpointsDataPlaneService with a custom implementation. */ + public WorkspaceClient withServingEndpointsDataPlaneImpl( + ServingEndpointsDataPlaneService servingEndpointsDataPlane) { + return this.withServingEndpointsDataPlaneAPI( + new ServingEndpointsDataPlaneAPI(servingEndpointsDataPlane)); + } + + /** Replace the default ServingEndpointsDataPlaneAPI with a custom implementation. */ + public WorkspaceClient withServingEndpointsDataPlaneAPI( + ServingEndpointsDataPlaneAPI servingEndpointsDataPlane) { + this.servingEndpointsDataPlaneAPI = servingEndpointsDataPlane; + return this; + } + /** Replace the default SettingsService with a custom implementation. */ public WorkspaceClient withSettingsImpl(SettingsService settings) { return this.withSettingsAPI(new SettingsAPI(settings)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java index 33f3f17b2..3afdc690c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneAPI.java @@ -20,7 +20,6 @@ public class ServingEndpointsDataPlaneAPI { /** Regular-use constructor */ public ServingEndpointsDataPlaneAPI( ApiClient apiClient, DatabricksConfig config, ServingEndpointsAPI servingEndpointsAPI) { - impl = new ServingEndpointsDataPlaneImpl(apiClient, config, servingEndpointsAPI); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index 8246353b8..b10083884 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -16,14 +16,14 @@ @Generated class ServingEndpointsDataPlaneImpl implements ServingEndpointsDataPlaneService { private final ApiClient apiClient; - private final ServingEndpointsAPI controlPlane; + private final ServingEndpointsAPI servingEndpointsAPI; private final DataPlaneTokenSource dataPlaneTokenSource; private final ConcurrentHashMap infos; public ServingEndpointsDataPlaneImpl( - ApiClient apiClient, DatabricksConfig config, ServingEndpointsAPI controlPlane) { + ApiClient apiClient, DatabricksConfig config, ServingEndpointsAPI servingEndpointsAPI) { this.apiClient = apiClient; - this.controlPlane = controlPlane; + this.servingEndpointsAPI = servingEndpointsAPI; this.dataPlaneTokenSource = new DataPlaneTokenSource( apiClient.getHttpClient(), config.getTokenSource(), config.getHost()); @@ -37,7 +37,7 @@ private DataPlaneInfo dataPlaneInfoQuery(QueryEndpointInput request) { key, k -> { ServingEndpointDetailed response = - controlPlane.get(new GetServingEndpointRequest().setName(request.getName())); + servingEndpointsAPI.get(new GetServingEndpointRequest().setName(request.getName())); return response.getDataPlaneInfo().getQueryInfo(); }); } From 21ab388ed7cf09fa2bcfa03c9dae611eb7aac739 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 13:15:11 +0000 Subject: [PATCH 16/31] Add unit tests to ErrorTokenSource and DatabricksConfig --- .../sdk/core/oauth/ErrorTokenSource.java | 15 ++++- .../sdk/core/DatabricksConfigTest.java | 55 +++++++++++++++++++ .../sdk/core/oauth/ErrorTokenSourceTest.java | 32 +++++++++++ 3 files changed, 101 insertions(+), 1 deletion(-) create mode 100644 
databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/ErrorTokenSourceTest.java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java index 9fe4d79d6..1c8574958 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java @@ -1,6 +1,7 @@ package com.databricks.sdk.core.oauth; import com.databricks.sdk.core.DatabricksException; +import java.util.Objects; /** * A TokenSource implementation that always throws an error when attempting to get a token. This is @@ -9,10 +10,22 @@ public class ErrorTokenSource implements TokenSource { private final String errorMessage; + /** + * Constructs a new ErrorTokenSource with the specified error message. + * + * @param errorMessage The error message that will be thrown when attempting to get a token + * @throws NullPointerException if errorMessage is null + */ public ErrorTokenSource(String errorMessage) { - this.errorMessage = errorMessage; + this.errorMessage = Objects.requireNonNull(errorMessage, "errorMessage cannot be null"); } + /** + * Always throws a DatabricksException with the configured error message. 
+ * + * @return never returns normally, always throws an exception + * @throws DatabricksException with the configured error message + */ @Override public Token getToken() { throw new DatabricksException(errorMessage); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java index e552a1427..c73356520 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java @@ -1,11 +1,18 @@ package com.databricks.sdk.core; import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; import com.databricks.sdk.core.commons.CommonsHttpClient; +import com.databricks.sdk.core.http.HttpClient; +import com.databricks.sdk.core.oauth.ErrorTokenSource; import com.databricks.sdk.core.oauth.OpenIDConnectEndpoints; +import com.databricks.sdk.core.oauth.OAuthHeaderFactory; +import com.databricks.sdk.core.oauth.Token; +import com.databricks.sdk.core.oauth.TokenSource; import com.databricks.sdk.core.utils.Environment; import java.io.IOException; +import java.time.LocalDateTime; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -195,4 +202,52 @@ public void testClone() { assert newWorkspaceConfig.getClientId().equals("my-client-id"); assert newWorkspaceConfig.getClientSecret().equals("my-client-secret"); } + + @Test + public void testGetTokenSourceWithNonOAuth() { + HttpClient httpClient = mock(HttpClient.class); + HeaderFactory mockHeaderFactory = mock(HeaderFactory.class); + CredentialsProvider mockProvider = mock(CredentialsProvider.class); + when(mockProvider.authType()).thenReturn("test"); + when(mockProvider.configure(any())).thenReturn(mockHeaderFactory); + + DatabricksConfig config = + new DatabricksConfig() + .setHost("https://test.databricks.com") + .setHttpClient(httpClient) + 
.setCredentialsProvider(mockProvider); + + // This will set the headerFactory internally + config.authenticate(); + + TokenSource tokenSource = config.getTokenSource(); + assertTrue(tokenSource instanceof ErrorTokenSource); + DatabricksException exception = assertThrows(DatabricksException.class, () -> tokenSource.getToken()); + assertEquals("OAuth Token not supported for current auth type test", exception.getMessage()); + } + + + @Test + public void testGetTokenSourceWithOAuth() { + HttpClient httpClient = mock(HttpClient.class); + TokenSource mockTokenSource = mock(TokenSource.class); + when(mockTokenSource.getToken()).thenReturn(new Token("test-token", "Bearer", LocalDateTime.now().plusHours(1))); + OAuthHeaderFactory mockHeaderFactory = OAuthHeaderFactory.fromTokenSource(mockTokenSource); + CredentialsProvider mockProvider = mock(CredentialsProvider.class); + when(mockProvider.authType()).thenReturn("test"); + when(mockProvider.configure(any())).thenReturn(mockHeaderFactory); + + DatabricksConfig config = + new DatabricksConfig() + .setHost("https://test.databricks.com") + .setHttpClient(httpClient) + .setCredentialsProvider(mockProvider); + + // This will set the headerFactory internally + config.authenticate(); + + TokenSource tokenSource = config.getTokenSource(); + assertFalse(tokenSource instanceof ErrorTokenSource); + assertEquals(tokenSource.getToken().getAccessToken(), "test-token"); + } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/ErrorTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/ErrorTokenSourceTest.java new file mode 100644 index 000000000..3438b79c1 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/ErrorTokenSourceTest.java @@ -0,0 +1,32 @@ +package com.databricks.sdk.core.oauth; + +import com.databricks.sdk.core.DatabricksException; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; + +public class 
ErrorTokenSourceTest { + + @Test + public void testGetTokenThrowsException() { + String errorMessage = "Test error message"; + ErrorTokenSource tokenSource = new ErrorTokenSource(errorMessage); + + DatabricksException exception = assertThrows( + DatabricksException.class, + () -> tokenSource.getToken(), + "Expected getToken() to throw DatabricksException" + ); + + assertEquals(errorMessage, exception.getMessage(), + "Exception message should match the one provided in constructor"); + } + + @Test + public void testConstructorWithNullErrorMessage() { + assertThrows( + NullPointerException.class, + () -> new ErrorTokenSource(null), + "Expected constructor to throw NullPointerException when error message is null" + ); + } +} From dc75702a505844c48e341838288226fd1daea648 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 13:51:50 +0000 Subject: [PATCH 17/31] Temporarily changed WorkspaceClient to only include the services relevant to direct dataplane access --- .../com/databricks/sdk/WorkspaceClient.java | 123 ------------------ .../sdk/core/oauth/ErrorTokenSource.java | 4 +- .../sdk/core/DatabricksConfigTest.java | 17 +-- .../sdk/core/oauth/ErrorTokenSourceTest.java | 48 +++---- 4 files changed, 36 insertions(+), 156 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index f36b6ff47..d4c066a69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -140,8 +140,6 @@ import com.databricks.sdk.service.ml.ModelRegistryService; import com.databricks.sdk.service.pipelines.PipelinesAPI; import com.databricks.sdk.service.pipelines.PipelinesService; -import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorsV2API; -import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorsV2Service; import 
com.databricks.sdk.service.serving.ServingEndpointsAPI; import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneAPI; import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneService; @@ -284,7 +282,6 @@ public class WorkspaceClient { private ProviderProvidersAPI providerProvidersAPI; private ProvidersAPI providersAPI; private QualityMonitorsAPI qualityMonitorsAPI; - private QualityMonitorsV2API qualityMonitorsV2API; private QueriesAPI queriesAPI; private QueriesLegacyAPI queriesLegacyAPI; private QueryExecutionAPI queryExecutionAPI; @@ -330,212 +327,108 @@ public WorkspaceClient() { public WorkspaceClient(DatabricksConfig config) { this.config = config; apiClient = new ApiClient(config); - accessControlAPI = new AccessControlAPI(apiClient); - accountAccessControlProxyAPI = new AccountAccessControlProxyAPI(apiClient); - alertsAPI = new AlertsAPI(apiClient); - alertsLegacyAPI = new AlertsLegacyAPI(apiClient); - alertsV2API = new AlertsV2API(apiClient); - appsAPI = new AppsAPI(apiClient); - artifactAllowlistsAPI = new ArtifactAllowlistsAPI(apiClient); - catalogsAPI = new CatalogsAPI(apiClient); - cleanRoomAssetsAPI = new CleanRoomAssetsAPI(apiClient); - cleanRoomTaskRunsAPI = new CleanRoomTaskRunsAPI(apiClient); - cleanRoomsAPI = new CleanRoomsAPI(apiClient); - clusterPoliciesAPI = new ClusterPoliciesAPI(apiClient); - clustersAPI = new ClustersExt(apiClient); - commandExecutionAPI = new CommandExecutionAPI(apiClient); - connectionsAPI = new ConnectionsAPI(apiClient); - consumerFulfillmentsAPI = new ConsumerFulfillmentsAPI(apiClient); - consumerInstallationsAPI = new ConsumerInstallationsAPI(apiClient); - consumerListingsAPI = new ConsumerListingsAPI(apiClient); - consumerPersonalizationRequestsAPI = new ConsumerPersonalizationRequestsAPI(apiClient); - consumerProvidersAPI = new ConsumerProvidersAPI(apiClient); - credentialsAPI = new CredentialsAPI(apiClient); - credentialsManagerAPI = new CredentialsManagerAPI(apiClient); - currentUserAPI = new 
CurrentUserAPI(apiClient); - dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); - dashboardsAPI = new DashboardsAPI(apiClient); - dataSourcesAPI = new DataSourcesAPI(apiClient); - databaseInstancesAPI = new DatabaseInstancesAPI(apiClient); - dbfsAPI = new DbfsExt(apiClient); - dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); - experimentsAPI = new ExperimentsAPI(apiClient); - externalLocationsAPI = new ExternalLocationsAPI(apiClient); - filesAPI = new FilesAPI(apiClient); - functionsAPI = new FunctionsAPI(apiClient); - genieAPI = new GenieAPI(apiClient); - gitCredentialsAPI = new GitCredentialsAPI(apiClient); - globalInitScriptsAPI = new GlobalInitScriptsAPI(apiClient); - grantsAPI = new GrantsAPI(apiClient); - groupsAPI = new GroupsAPI(apiClient); - instancePoolsAPI = new InstancePoolsAPI(apiClient); - instanceProfilesAPI = new InstanceProfilesAPI(apiClient); - ipAccessListsAPI = new IpAccessListsAPI(apiClient); - jobsAPI = new JobsAPI(apiClient); - lakeviewAPI = new LakeviewAPI(apiClient); - lakeviewEmbeddedAPI = new LakeviewEmbeddedAPI(apiClient); - librariesAPI = new LibrariesAPI(apiClient); - metastoresAPI = new MetastoresAPI(apiClient); - modelRegistryAPI = new ModelRegistryAPI(apiClient); - modelVersionsAPI = new ModelVersionsAPI(apiClient); - notificationDestinationsAPI = new NotificationDestinationsAPI(apiClient); - onlineTablesAPI = new OnlineTablesAPI(apiClient); - permissionMigrationAPI = new PermissionMigrationAPI(apiClient); - permissionsAPI = new PermissionsAPI(apiClient); - pipelinesAPI = new PipelinesAPI(apiClient); - policyComplianceForClustersAPI = new PolicyComplianceForClustersAPI(apiClient); - policyComplianceForJobsAPI = new PolicyComplianceForJobsAPI(apiClient); - policyFamiliesAPI = new PolicyFamiliesAPI(apiClient); - providerExchangeFiltersAPI = new ProviderExchangeFiltersAPI(apiClient); - providerExchangesAPI = new ProviderExchangesAPI(apiClient); - providerFilesAPI = new ProviderFilesAPI(apiClient); - providerListingsAPI 
= new ProviderListingsAPI(apiClient); - providerPersonalizationRequestsAPI = new ProviderPersonalizationRequestsAPI(apiClient); - providerProviderAnalyticsDashboardsAPI = new ProviderProviderAnalyticsDashboardsAPI(apiClient); - providerProvidersAPI = new ProviderProvidersAPI(apiClient); - providersAPI = new ProvidersAPI(apiClient); - qualityMonitorsAPI = new QualityMonitorsAPI(apiClient); - - qualityMonitorsV2API = new QualityMonitorsV2API(apiClient); - queriesAPI = new QueriesAPI(apiClient); - queriesLegacyAPI = new QueriesLegacyAPI(apiClient); - queryExecutionAPI = new QueryExecutionAPI(apiClient); - queryHistoryAPI = new QueryHistoryAPI(apiClient); - queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient); - queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient); - recipientActivationAPI = new RecipientActivationAPI(apiClient); - recipientFederationPoliciesAPI = new RecipientFederationPoliciesAPI(apiClient); - recipientsAPI = new RecipientsAPI(apiClient); - redashConfigAPI = new RedashConfigAPI(apiClient); - registeredModelsAPI = new RegisteredModelsAPI(apiClient); - reposAPI = new ReposAPI(apiClient); - resourceQuotasAPI = new ResourceQuotasAPI(apiClient); - schemasAPI = new SchemasAPI(apiClient); - secretsAPI = new SecretsExt(apiClient); - servicePrincipalsAPI = new ServicePrincipalsAPI(apiClient); - servingEndpointsAPI = new ServingEndpointsAPI(apiClient); - servingEndpointsDataPlaneAPI = new ServingEndpointsDataPlaneAPI(apiClient, config, servingEndpointsAPI); - settingsAPI = new SettingsAPI(apiClient); - sharesAPI = new SharesAPI(apiClient); - statementExecutionAPI = new StatementExecutionAPI(apiClient); - storageCredentialsAPI = new StorageCredentialsAPI(apiClient); - systemSchemasAPI = new SystemSchemasAPI(apiClient); - tableConstraintsAPI = new TableConstraintsAPI(apiClient); - tablesAPI = new TablesAPI(apiClient); - temporaryTableCredentialsAPI = new TemporaryTableCredentialsAPI(apiClient); - tokenManagementAPI = new 
TokenManagementAPI(apiClient); - tokensAPI = new TokensAPI(apiClient); - usersAPI = new UsersAPI(apiClient); - vectorSearchEndpointsAPI = new VectorSearchEndpointsAPI(apiClient); - vectorSearchIndexesAPI = new VectorSearchIndexesAPI(apiClient); - volumesAPI = new VolumesAPI(apiClient); - warehousesAPI = new WarehousesAPI(apiClient); - workspaceAPI = new WorkspaceAPI(apiClient); - workspaceBindingsAPI = new WorkspaceBindingsAPI(apiClient); - workspaceConfAPI = new WorkspaceConfAPI(apiClient); - forecastingAPI = new ForecastingAPI(apiClient); } @@ -1332,11 +1225,6 @@ public QualityMonitorsAPI qualityMonitors() { return qualityMonitorsAPI; } - /** Manage data quality of UC objects (currently support `schema`) */ - public QualityMonitorsV2API qualityMonitorsV2() { - return qualityMonitorsV2API; - } - /** * The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL * object that includes the target SQL warehouse, query text, name, description, tags, and @@ -2630,17 +2518,6 @@ public WorkspaceClient withQualityMonitorsAPI(QualityMonitorsAPI qualityMonitors return this; } - /** Replace the default QualityMonitorsV2Service with a custom implementation. */ - public WorkspaceClient withQualityMonitorsV2Impl(QualityMonitorsV2Service qualityMonitorsV2) { - return this.withQualityMonitorsV2API(new QualityMonitorsV2API(qualityMonitorsV2)); - } - - /** Replace the default QualityMonitorsV2API with a custom implementation. */ - public WorkspaceClient withQualityMonitorsV2API(QualityMonitorsV2API qualityMonitorsV2) { - this.qualityMonitorsV2API = qualityMonitorsV2; - return this; - } - /** Replace the default QueriesService with a custom implementation. 
*/ public WorkspaceClient withQueriesImpl(QueriesService queries) { return this.withQueriesAPI(new QueriesAPI(queries)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java index 1c8574958..0add3d9c6 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/ErrorTokenSource.java @@ -12,7 +12,7 @@ public class ErrorTokenSource implements TokenSource { /** * Constructs a new ErrorTokenSource with the specified error message. - * + * * @param errorMessage The error message that will be thrown when attempting to get a token * @throws NullPointerException if errorMessage is null */ @@ -22,7 +22,7 @@ public ErrorTokenSource(String errorMessage) { /** * Always throws a DatabricksException with the configured error message. - * + * * @return never returns normally, always throws an exception * @throws DatabricksException with the configured error message */ diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java index c73356520..38b6fcd9c 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java @@ -6,8 +6,8 @@ import com.databricks.sdk.core.commons.CommonsHttpClient; import com.databricks.sdk.core.http.HttpClient; import com.databricks.sdk.core.oauth.ErrorTokenSource; -import com.databricks.sdk.core.oauth.OpenIDConnectEndpoints; import com.databricks.sdk.core.oauth.OAuthHeaderFactory; +import com.databricks.sdk.core.oauth.OpenIDConnectEndpoints; import com.databricks.sdk.core.oauth.Token; import com.databricks.sdk.core.oauth.TokenSource; import 
com.databricks.sdk.core.utils.Environment; @@ -210,7 +210,7 @@ public void testGetTokenSourceWithNonOAuth() { CredentialsProvider mockProvider = mock(CredentialsProvider.class); when(mockProvider.authType()).thenReturn("test"); when(mockProvider.configure(any())).thenReturn(mockHeaderFactory); - + DatabricksConfig config = new DatabricksConfig() .setHost("https://test.databricks.com") @@ -219,24 +219,25 @@ public void testGetTokenSourceWithNonOAuth() { // This will set the headerFactory internally config.authenticate(); - + TokenSource tokenSource = config.getTokenSource(); assertTrue(tokenSource instanceof ErrorTokenSource); - DatabricksException exception = assertThrows(DatabricksException.class, () -> tokenSource.getToken()); + DatabricksException exception = + assertThrows(DatabricksException.class, () -> tokenSource.getToken()); assertEquals("OAuth Token not supported for current auth type test", exception.getMessage()); } - @Test public void testGetTokenSourceWithOAuth() { HttpClient httpClient = mock(HttpClient.class); TokenSource mockTokenSource = mock(TokenSource.class); - when(mockTokenSource.getToken()).thenReturn(new Token("test-token", "Bearer", LocalDateTime.now().plusHours(1))); + when(mockTokenSource.getToken()) + .thenReturn(new Token("test-token", "Bearer", LocalDateTime.now().plusHours(1))); OAuthHeaderFactory mockHeaderFactory = OAuthHeaderFactory.fromTokenSource(mockTokenSource); CredentialsProvider mockProvider = mock(CredentialsProvider.class); when(mockProvider.authType()).thenReturn("test"); when(mockProvider.configure(any())).thenReturn(mockHeaderFactory); - + DatabricksConfig config = new DatabricksConfig() .setHost("https://test.databricks.com") @@ -245,7 +246,7 @@ public void testGetTokenSourceWithOAuth() { // This will set the headerFactory internally config.authenticate(); - + TokenSource tokenSource = config.getTokenSource(); assertFalse(tokenSource instanceof ErrorTokenSource); assertEquals(tokenSource.getToken().getAccessToken(), 
"test-token"); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/ErrorTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/ErrorTokenSourceTest.java index 3438b79c1..5bf66f19c 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/ErrorTokenSourceTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/ErrorTokenSourceTest.java @@ -1,32 +1,34 @@ package com.databricks.sdk.core.oauth; +import static org.junit.jupiter.api.Assertions.*; + import com.databricks.sdk.core.DatabricksException; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; public class ErrorTokenSourceTest { - - @Test - public void testGetTokenThrowsException() { - String errorMessage = "Test error message"; - ErrorTokenSource tokenSource = new ErrorTokenSource(errorMessage); - - DatabricksException exception = assertThrows( + + @Test + public void testGetTokenThrowsException() { + String errorMessage = "Test error message"; + ErrorTokenSource tokenSource = new ErrorTokenSource(errorMessage); + + DatabricksException exception = + assertThrows( DatabricksException.class, () -> tokenSource.getToken(), - "Expected getToken() to throw DatabricksException" - ); - - assertEquals(errorMessage, exception.getMessage(), - "Exception message should match the one provided in constructor"); - } - - @Test - public void testConstructorWithNullErrorMessage() { - assertThrows( - NullPointerException.class, - () -> new ErrorTokenSource(null), - "Expected constructor to throw NullPointerException when error message is null" - ); - } + "Expected getToken() to throw DatabricksException"); + + assertEquals( + errorMessage, + exception.getMessage(), + "Exception message should match the one provided in constructor"); + } + + @Test + public void testConstructorWithNullErrorMessage() { + assertThrows( + NullPointerException.class, + () -> new ErrorTokenSource(null), + "Expected 
constructor to throw NullPointerException when error message is null"); + } } From fdf130e2d34aa30349a5adfe938528565d9cb95b Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 13:57:54 +0000 Subject: [PATCH 18/31] Update change log --- NEXT_CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 808c01fcf..e055850be 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -3,6 +3,7 @@ ## Release v0.52.0 ### New Features and Improvements +* Added Direct-to-Dataplane API support, allowing users query route optimized model serving endpoints ([#453](https://github.com/databricks/databricks-sdk-java/pull/453)). ### Bug Fixes From a2ae11b85b6c5d09160fa62524cd8fbd7b7e2323 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 14:01:51 +0000 Subject: [PATCH 19/31] Fix very annoying formatting issue --- .../java/com/databricks/sdk/core/DatabricksConfig.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index af210c07c..b70f65112 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -409,17 +409,13 @@ public DatabricksConfig setAzureUseMsi(boolean azureUseMsi) { return this; } - /** - * @deprecated Use {@link #getAzureUseMsi()} instead. - */ + /** @deprecated Use {@link #getAzureUseMsi()} instead. */ @Deprecated() public boolean getAzureUseMSI() { return azureUseMsi; } - /** - * @deprecated Use {@link #setAzureUseMsi(boolean)} instead. - */ + /** @deprecated Use {@link #setAzureUseMsi(boolean)} instead. 
*/ @Deprecated public DatabricksConfig setAzureUseMSI(boolean azureUseMsi) { this.azureUseMsi = azureUseMsi; From 9f98250d9b029887ddfdd911b96890c1c9efb50f Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 14:10:12 +0000 Subject: [PATCH 20/31] Small change to order of methods in ApiClient --- .../com/databricks/sdk/core/ApiClient.java | 80 +++++++++---------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index 63812b848..a6c2ef112 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -180,6 +180,46 @@ protected O withJavaType(Request request, JavaType javaType) { } } + /** + * Executes HTTP request with retries and converts it to proper POJO. + * + * @param in Commons HTTP request + * @param target Expected pojo type + * @return POJO of requested type + */ + public T execute(Request in, Class target) throws IOException { + Response out = getResponse(in); + if (target == Void.class) { + return null; + } + return deserialize(out, target); + } + + /** + * Executes HTTP request with retries and converts it to proper POJO, using custom request + * options. 
+ * + * @param in Commons HTTP request + * @param target Expected pojo type + * @param options Optional request options to customize request behavior + * @return POJO of requested type + */ + public T execute(Request in, Class target, RequestOptions options) throws IOException { + Response out = getResponse(in, options); + if (target == Void.class) { + return null; + } + return deserialize(out, target); + } + + private Response getResponse(Request in) { + return executeInner(in, in.getUrl(), Optional.empty()); + } + + private Response getResponse(Request in, RequestOptions options) { + return executeInner(in, in.getUrl(), Optional.of(options)); + } + private Response executeInner(Request in, String path, Optional options) { RetryStrategy retryStrategy = retryStrategyPicker.getRetryStrategy(in); int attemptNumber = 0; @@ -248,46 +288,6 @@ private Response executeInner(Request in, String path, Optional } } - /** - * Executes HTTP request with retries and converts it to proper POJO. - * - * @param in Commons HTTP request - * @param target Expected pojo type - * @return POJO of requested type - */ - public T execute(Request in, Class target) throws IOException { - Response out = getResponse(in); - if (target == Void.class) { - return null; - } - return deserialize(out, target); - } - - /** - * Executes HTTP request with retries and converts it to proper POJO, using custom request - * options. 
- * - * @param in Commons HTTP request - * @param target Expected pojo type - * @param options Optional request options to customize request behavior - * @return POJO of requested type - */ - public T execute(Request in, Class target, RequestOptions options) throws IOException { - Response out = getResponse(in, options); - if (target == Void.class) { - return null; - } - return deserialize(out, target); - } - - private Response getResponse(Request in) { - return executeInner(in, in.getUrl(), Optional.empty()); - } - - private Response getResponse(Request in, RequestOptions options) { - return executeInner(in, in.getUrl(), Optional.of(options)); - } - private boolean isRequestSuccessful(Response response, Exception e) { return e == null && response.getStatusCode() >= 200 From 4a0b8fd86761f3c89415bb52f9570598c6139298 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 17:08:26 +0200 Subject: [PATCH 21/31] Delete databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java Delete temp example --- .../serving/ServingDataplaneExample.java | 66 ------------------- 1 file changed, 66 deletions(-) delete mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java deleted file mode 100644 index 40e549a8e..000000000 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/serving/ServingDataplaneExample.java +++ /dev/null @@ -1,66 +0,0 @@ -package com.databricks.sdk.service.serving; - -import static org.junit.jupiter.api.Assertions.*; - -import com.databricks.sdk.WorkspaceClient; -import com.databricks.sdk.core.DatabricksConfig; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import 
org.slf4j.LoggerFactory; - -public class ServingDataplaneExample { - private static final Logger LOG = LoggerFactory.getLogger(ServingDataplaneExample.class); - - @Test - void testQueryModelEndpoint() { - // Use Databricks CLI authentication with required scopes - DatabricksConfig config = - new DatabricksConfig() - .setAuthType("databricks-cli") - .setHost("https://e2-dogfood.staging.cloud.databricks.com") - .setDebugHeaders(true); - - LOG.info("Creating WorkspaceClient with config: {}", config); - WorkspaceClient client = new WorkspaceClient(config); - - // Initialize the ServingEndpointsDataPlaneAPI - ServingEndpointsDataPlaneAPI servingEndpointsDataPlaneAPI = client.servingEndpointsDataPlane(); - - // Example: Query a model endpoint - String endpointName = "TestDirectDataplaneEmmy"; // Replace with your endpoint name - - try { - // Create input data for trip prediction - Map inputData = new HashMap<>(); - inputData.put("trip_distance", 2.5); - inputData.put("pickup_zip", "10001"); - inputData.put("dropoff_zip", "10002"); - - LOG.info("Preparing to send request to endpoint: {}", endpointName); - LOG.info("Request payload: {}", inputData); - - // Create the query input object - QueryEndpointInput queryInput = - new QueryEndpointInput().setName(endpointName).setInputs(new Map[] {inputData}); - - LOG.info("Querying endpoint {} with input: {}", endpointName, queryInput); - - // Query the endpoint - QueryEndpointResponse response = servingEndpointsDataPlaneAPI.query(queryInput); - - // Add assertions to verify the response - assertNotNull(response, "Response should not be null"); - assertNotNull(response.getPredictions(), "Response predictions should not be null"); - assertFalse(response.getPredictions().isEmpty(), "Response predictions should not be empty"); - - // Print the response for debugging purposes - LOG.info("Model Response: {}", response.getPredictions()); - - } catch (Exception e) { - LOG.error("Test failed with exception", e); - fail("Test failed with 
exception: " + e.getMessage(), e); - } - } -} From 5655ae5cf14eff091302cfe7ca1c1cd787c5a352 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 17:09:38 +0000 Subject: [PATCH 22/31] Small fix --- .../sdk/service/serving/ServingEndpointsDataPlaneImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index b10083884..d65f35559 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -53,6 +53,7 @@ public QueryEndpointResponse query(QueryEndpointInput request) { try { Request req = new Request("POST", dataPlaneInfo.getEndpointUrl(), apiClient.serialize(request)); + ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); From ef392fdbb311de180d347a455b3106c5de76dc40 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 17:15:46 +0000 Subject: [PATCH 23/31] Update ApiClient --- .../com/databricks/sdk/core/ApiClient.java | 20 +++++-------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index a6c2ef112..9a13a8f23 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -173,7 +173,7 @@ public Map getStringMap(Request req) { protected O withJavaType(Request request, JavaType javaType) { try { - Response response = getResponse(request); + Response response = getResponse(request, new RequestOptions()); return 
deserialize(response.getBody(), javaType); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -188,11 +188,7 @@ protected O withJavaType(Request request, JavaType javaType) { * @return POJO of requested type */ public T execute(Request in, Class target) throws IOException { - Response out = getResponse(in); - if (target == Void.class) { - return null; - } - return deserialize(out, target); + return execute(in, target, new RequestOptions()); } /** @@ -212,15 +208,11 @@ public T execute(Request in, Class target, RequestOptions options) throws return deserialize(out, target); } - private Response getResponse(Request in) { - return executeInner(in, in.getUrl(), Optional.empty()); - } - private Response getResponse(Request in, RequestOptions options) { - return executeInner(in, in.getUrl(), Optional.of(options)); + return executeInner(in, in.getUrl(), options); } - private Response executeInner(Request in, String path, Optional options) { + private Response executeInner(Request in, String path, RequestOptions options) { RetryStrategy retryStrategy = retryStrategyPicker.getRetryStrategy(in); int attemptNumber = 0; while (true) { @@ -245,9 +237,7 @@ private Response executeInner(Request in, String path, Optional } in.withHeader("User-Agent", userAgent); - if (options.isPresent()) { - in = options.get().applyOptions(in); - } + options.applyOptions(in); // Make the request, catching any exceptions, as we may want to retry. 
try { From 37776419bd8cce2fb1d80f812c0cf7d6ce0bff6a Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 17:42:39 +0000 Subject: [PATCH 24/31] Fix typo in changelog --- NEXT_CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index e055850be..96abd5fc1 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -3,7 +3,7 @@ ## Release v0.52.0 ### New Features and Improvements -* Added Direct-to-Dataplane API support, allowing users query route optimized model serving endpoints ([#453](https://github.com/databricks/databricks-sdk-java/pull/453)). +* Added Direct-to-Dataplane API support, allowing users to query route optimized model serving endpoints ([#453](https://github.com/databricks/databricks-sdk-java/pull/453)). ### Bug Fixes From b2eb8abb28dad387eaa0059e3b8bc1bffe228419 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 18:22:32 +0000 Subject: [PATCH 25/31] Updated imp template --- .../serving/ServingEndpointsDataPlaneImpl.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index d65f35559..b7d4c3be1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -31,7 +31,9 @@ public ServingEndpointsDataPlaneImpl( } private DataPlaneInfo dataPlaneInfoQuery(QueryEndpointInput request) { - String key = String.format("Query/%s", request.getName()); + String key = + String.format( + "Query/%s", String.join("/", new String[] {String.valueOf(request.getName())})); return infos.computeIfAbsent( key, @@ -45,22 +47,21 @@ private DataPlaneInfo 
dataPlaneInfoQuery(QueryEndpointInput request) { @Override public QueryEndpointResponse query(QueryEndpointInput request) { DataPlaneInfo dataPlaneInfo = dataPlaneInfoQuery(request); - + String path = dataPlaneInfo.getEndpointUrl(); Token token = dataPlaneTokenSource.getToken( dataPlaneInfo.getEndpointUrl(), dataPlaneInfo.getAuthorizationDetails()); try { - Request req = - new Request("POST", dataPlaneInfo.getEndpointUrl(), apiClient.serialize(request)); + Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); RequestOptions options = new RequestOptions() - .withAuthorization("Bearer " + token.getAccessToken()) - .withUrl(dataPlaneInfo.getEndpointUrl()); + .withAuthorization(token.getTokenType() + " " + token.getAccessToken()) + .withUrl(path); return apiClient.execute(req, QueryEndpointResponse.class, options); } catch (IOException e) { From 8c921522a0c91bbb58aae8298c61015875349303 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Mon, 26 May 2025 22:54:34 +0000 Subject: [PATCH 26/31] Remove getReponse() from ApiClient --- .../src/main/java/com/databricks/sdk/core/ApiClient.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index 9a13a8f23..2d4eeadc0 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -173,7 +173,7 @@ public Map getStringMap(Request req) { protected O withJavaType(Request request, JavaType javaType) { try { - Response response = getResponse(request, new RequestOptions()); + Response response = executeInner(request, request.getUrl(), new RequestOptions()); return deserialize(response.getBody(), javaType); } catch 
(IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -201,17 +201,13 @@ public T execute(Request in, Class target) throws IOException { * @return POJO of requested type */ public T execute(Request in, Class target, RequestOptions options) throws IOException { - Response out = getResponse(in, options); + Response out = executeInner(in, in.getUrl(), options); if (target == Void.class) { return null; } return deserialize(out, target); } - private Response getResponse(Request in, RequestOptions options) { - return executeInner(in, in.getUrl(), options); - } - private Response executeInner(Request in, String path, RequestOptions options) { RetryStrategy retryStrategy = retryStrategyPicker.getRetryStrategy(in); int attemptNumber = 0; From b8564f22809680b7a0b7f67635ac4f5ef493d141 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Tue, 27 May 2025 10:39:26 +0200 Subject: [PATCH 27/31] Update databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java Update class description of RequestOptions Co-authored-by: Renaud Hartert --- .../main/java/com/databricks/sdk/core/http/RequestOptions.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java index eb1a015a6..dc6c06602 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java @@ -5,6 +5,8 @@ /** * A builder class for configuring HTTP request transformations including authentication, URL, and * user agent headers. + * + * Experimental: this class is experimental and subject to change in backward incompatible ways. 
*/ public class RequestOptions { private Function authenticateFunc; From 5eadb663a4af92750220bf2b8fe9400b2f87b0b8 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Tue, 27 May 2025 09:12:21 +0000 Subject: [PATCH 28/31] Update OAuthHeaderFactory --- .../databricks/sdk/core/DatabricksConfig.java | 21 ++-- .../sdk/core/http/RequestOptions.java | 2 +- .../sdk/core/oauth/OAuthHeaderFactory.java | 28 +++++- .../OAuthHeaderFactoryFromSuppliers.java | 35 ------- .../OAuthHeaderFactoryFromTokenSource.java | 34 ------- .../OAuthHeaderFactoryFromSuppliersTest.java | 42 -------- ...OAuthHeaderFactoryFromTokenSourceTest.java | 40 -------- .../core/oauth/OAuthHeaderFactoryTest.java | 96 +++++++++++++++++++ 8 files changed, 134 insertions(+), 164 deletions(-) delete mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java delete mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java delete mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java delete mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryTest.java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index b70f65112..de6548982 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -213,20 +213,21 @@ public synchronized Map authenticate() throws DatabricksExceptio } public TokenSource getTokenSource() { - try { - if (headerFactory == null) { + if (headerFactory == null) { + try { ConfigLoader.fixHostIfNeeded(this); headerFactory = 
credentialsProvider.configure(this); - setAuthType(credentialsProvider.authType()); + } catch (Exception e) { + return new ErrorTokenSource("Failed to get token source: " + e.getMessage()); } - if (headerFactory instanceof OAuthHeaderFactory) { - return (TokenSource) headerFactory; - } - return new ErrorTokenSource( - String.format("OAuth Token not supported for current auth type %s", authType)); - } catch (Exception e) { - return new ErrorTokenSource("Failed to get token source: " + e.getMessage()); + setAuthType(credentialsProvider.authType()); + } + + if (headerFactory instanceof OAuthHeaderFactory) { + return (TokenSource) headerFactory; } + return new ErrorTokenSource( + String.format("OAuth Token not supported for current auth type %s", authType)); } public CredentialsProvider getCredentialsProvider() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java index dc6c06602..a9f6c487c 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/RequestOptions.java @@ -6,7 +6,7 @@ * A builder class for configuring HTTP request transformations including authentication, URL, and * user agent headers. * - * Experimental: this class is experimental and subject to change in backward incompatible ways. + *

Experimental: this class is experimental and subject to change in backward incompatible ways. */ public class RequestOptions { private Function authenticateFunc; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java index 13d65df4f..614614c55 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactory.java @@ -1,6 +1,7 @@ package com.databricks.sdk.core.oauth; import com.databricks.sdk.core.HeaderFactory; +import java.util.HashMap; import java.util.Map; import java.util.function.Supplier; @@ -19,7 +20,17 @@ public interface OAuthHeaderFactory extends HeaderFactory, TokenSource { */ static OAuthHeaderFactory fromSuppliers( Supplier tokenSupplier, Supplier> headerSupplier) { - return new OAuthHeaderFactoryFromSuppliers(tokenSupplier, headerSupplier); + return new OAuthHeaderFactory() { + @Override + public Map headers() { + return headerSupplier.get(); + } + + @Override + public Token getToken() { + return tokenSupplier.get(); + } + }; } /** @@ -30,6 +41,19 @@ static OAuthHeaderFactory fromSuppliers( * @return A new OAuthHeaderFactory instance that uses the provided token source */ static OAuthHeaderFactory fromTokenSource(TokenSource tokenSource) { - return new OAuthHeaderFactoryFromTokenSource(tokenSource); + return new OAuthHeaderFactory() { + @Override + public Token getToken() { + return tokenSource.getToken(); + } + + @Override + public Map headers() { + Token token = tokenSource.getToken(); + Map headers = new HashMap<>(); + headers.put("Authorization", token.getTokenType() + " " + token.getAccessToken()); + return headers; + } + }; } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java deleted file mode 100644 index e1d6e9c48..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliers.java +++ /dev/null @@ -1,35 +0,0 @@ -package com.databricks.sdk.core.oauth; - -import java.util.Map; -import java.util.function.Supplier; - -/** - * Implementation of {@link OAuthHeaderFactory} that uses separate suppliers for token and header - * generation. - */ -public class OAuthHeaderFactoryFromSuppliers implements OAuthHeaderFactory { - private final Supplier tokenSupplier; - private final Supplier> headerSupplier; - - /** - * Creates a new instance with the specified token and header suppliers. - * - * @param tokenSupplier Supplier for OAuth tokens. - * @param headerSupplier Supplier for headers. - */ - public OAuthHeaderFactoryFromSuppliers( - Supplier tokenSupplier, Supplier> headerSupplier) { - this.tokenSupplier = tokenSupplier; - this.headerSupplier = headerSupplier; - } - - @Override - public Map headers() { - return headerSupplier.get(); - } - - @Override - public Token getToken() { - return tokenSupplier.get(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java deleted file mode 100644 index e58a6bbe9..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSource.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.databricks.sdk.core.oauth; - -import java.util.HashMap; -import java.util.Map; - -/** - * Implementation of {@link OAuthHeaderFactory} that generates Authorization headers from a token - * source. 
- */ -public class OAuthHeaderFactoryFromTokenSource implements OAuthHeaderFactory { - private final TokenSource tokenSource; - - /** - * Creates a new instance with the specified token source. - * - * @param tokenSource Source of OAuth tokens. - */ - public OAuthHeaderFactoryFromTokenSource(TokenSource tokenSource) { - this.tokenSource = tokenSource; - } - - @Override - public Token getToken() { - return tokenSource.getToken(); - } - - @Override - public Map headers() { - Token token = tokenSource.getToken(); - Map headers = new HashMap<>(); - headers.put("Authorization", token.getTokenType() + " " + token.getAccessToken()); - return headers; - } -} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java deleted file mode 100644 index 18a91f628..000000000 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromSuppliersTest.java +++ /dev/null @@ -1,42 +0,0 @@ -package com.databricks.sdk.core.oauth; - -import static org.junit.jupiter.api.Assertions.*; - -import java.time.LocalDateTime; -import java.util.HashMap; -import java.util.Map; -import java.util.function.Supplier; -import org.junit.jupiter.api.Test; - -public class OAuthHeaderFactoryFromSuppliersTest { - - private static final String TOKEN_TYPE = "Bearer"; - private static final String TOKEN_VALUE = "test-token"; - - @Test - public void testTokenAndHeaders() { - Map expectedHeaders = new HashMap<>(); - expectedHeaders.put("Authorization", TOKEN_TYPE + " " + TOKEN_VALUE); - expectedHeaders.put("Content-Type", "application/json"); - - Supplier tokenSupplier = - () -> new Token(TOKEN_VALUE, TOKEN_TYPE, LocalDateTime.now().plusHours(1)); - Supplier> headerSupplier = () -> new HashMap<>(expectedHeaders); - - OAuthHeaderFactoryFromSuppliers factory = - new OAuthHeaderFactoryFromSuppliers(tokenSupplier, 
headerSupplier); - - Token actualToken = factory.getToken(); - assertEquals(TOKEN_VALUE, actualToken.getAccessToken()); - assertEquals(TOKEN_TYPE, actualToken.getTokenType()); - - Map actualHeaders = factory.headers(); - assertEquals(expectedHeaders.size(), actualHeaders.size(), "Header maps should have same size"); - expectedHeaders.forEach( - (key, value) -> - assertEquals( - value, - actualHeaders.get(key), - String.format("Header '%s' should have value '%s'", key, value))); - } -} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java deleted file mode 100644 index 91b392520..000000000 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryFromTokenSourceTest.java +++ /dev/null @@ -1,40 +0,0 @@ -package com.databricks.sdk.core.oauth; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; - -import java.time.LocalDateTime; -import java.util.Map; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -public class OAuthHeaderFactoryFromTokenSourceTest { - - private static final String TOKEN_TYPE = "Bearer"; - private static final String TOKEN_VALUE = "test-token"; - - @Mock private TokenSource tokenSource; - - @Test - public void testTokenAndHeaders() { - LocalDateTime expiry = LocalDateTime.now().plusHours(1); - Token token = new Token(TOKEN_VALUE, TOKEN_TYPE, expiry); - when(tokenSource.getToken()).thenReturn(token); - OAuthHeaderFactoryFromTokenSource factory = new OAuthHeaderFactoryFromTokenSource(tokenSource); - - Token actualToken = factory.getToken(); - assertEquals(TOKEN_VALUE, actualToken.getAccessToken()); - assertEquals(TOKEN_TYPE, actualToken.getTokenType()); - - 
Map headers = factory.headers(); - assertNotNull(headers); - assertEquals(1, headers.size()); - assertEquals(TOKEN_TYPE + " " + TOKEN_VALUE, headers.get("Authorization")); - - // Verify token source was called exactly twice (once for getToken, once for headers) - verify(tokenSource, times(2)).getToken(); - } -} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryTest.java new file mode 100644 index 000000000..d0530b2c1 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/oauth/OAuthHeaderFactoryTest.java @@ -0,0 +1,96 @@ +package com.databricks.sdk.core.oauth; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.time.LocalDateTime; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Supplier; +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class OAuthHeaderFactoryTest { + + private static final String TOKEN_TYPE = "Bearer"; + private static final String TOKEN_VALUE = "test-token"; + + @Mock private TokenSource tokenSource; + + private static Stream provideTokenSourceTestCases() { + LocalDateTime expiry = LocalDateTime.now().plusHours(1); + Token token = new Token(TOKEN_VALUE, TOKEN_TYPE, expiry); + + return Stream.of( + Arguments.of( + "Standard token source", + token, + Collections.singletonMap("Authorization", TOKEN_TYPE + " " + TOKEN_VALUE)), + Arguments.of( + "Token with custom type", + new Token(TOKEN_VALUE, "Custom", expiry), + Collections.singletonMap("Authorization", "Custom 
" + TOKEN_VALUE))); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("provideTokenSourceTestCases") + public void testFromTokenSourceFactoryMethod( + String testName, Token token, Map expectedHeaders) { + when(tokenSource.getToken()).thenReturn(token); + + OAuthHeaderFactory factory = OAuthHeaderFactory.fromTokenSource(tokenSource); + + assertNotNull(factory, "Factory should not be null"); + + Token actualToken = factory.getToken(); + assertEquals(token, actualToken, "Factory should return the same token as the source"); + + Map headers = factory.headers(); + assertEquals(expectedHeaders, headers, "Factory should generate correct headers"); + } + + private static Stream provideSuppliersTestCases() { + LocalDateTime expiry = LocalDateTime.now().plusHours(1); + Token token = new Token(TOKEN_VALUE, TOKEN_TYPE, expiry); + + Map standardHeaders = new HashMap<>(); + standardHeaders.put("Authorization", TOKEN_TYPE + " " + TOKEN_VALUE); + standardHeaders.put("Content-Type", "application/json"); + + Map multipleHeaders = new HashMap<>(); + multipleHeaders.put("Authorization", TOKEN_TYPE + " " + TOKEN_VALUE); + multipleHeaders.put("X-Custom-Header", "custom-value"); + multipleHeaders.put("Accept", "application/json"); + + return Stream.of( + Arguments.of("Standard suppliers", token, standardHeaders), + Arguments.of("Empty headers", token, new HashMap<>()), + Arguments.of("Multiple custom headers", token, multipleHeaders)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("provideSuppliersTestCases") + public void testFromSuppliersFactoryMethod( + String testName, Token token, Map expectedHeaders) { + Supplier tokenSupplier = () -> token; + Supplier> headerSupplier = () -> new HashMap<>(expectedHeaders); + + OAuthHeaderFactory factory = OAuthHeaderFactory.fromSuppliers(tokenSupplier, headerSupplier); + + assertNotNull(factory, "Factory should not be null"); + + Token actualToken = factory.getToken(); + assertEquals(token, actualToken, "Factory should return 
the same token as the supplier"); + + Map actualHeaders = factory.headers(); + assertEquals( + expectedHeaders, actualHeaders, "Factory should return the same headers as the supplier"); + } +} From ec211f3df365b7aef661ad98dbc9ab0452ccf18c Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Tue, 27 May 2025 14:50:23 +0000 Subject: [PATCH 29/31] Add latest generated code --- .codegen/_openapi_sha | 2 +- .gitattributes | 7 -- NEXT_CHANGELOG.md | 1 + .../com/databricks/sdk/WorkspaceClient.java | 2 + .../sdk/service/files/FilesAPI.java | 2 + .../sdk/service/files/FilesService.java | 2 + .../service/ml/ArtifactCredentialInfo.java | 114 ------------------ .../ml/ArtifactCredentialInfoHttpHeader.java | 58 --------- .../service/ml/ArtifactCredentialType.java | 14 --- .../sdk/service/ml/ExperimentsAPI.java | 24 ---- .../sdk/service/ml/ExperimentsImpl.java | 32 ----- .../sdk/service/ml/ExperimentsService.java | 8 -- ...redentialsForTraceDataDownloadRequest.java | 44 ------- ...edentialsForTraceDataDownloadResponse.java | 46 ------- ...tCredentialsForTraceDataUploadRequest.java | 44 ------- ...CredentialsForTraceDataUploadResponse.java | 45 ------- .../databricks/sdk/service/ml/RunInputs.java | 7 +- .../ServingEndpointsDataPlaneImpl.java | 5 +- 18 files changed, 10 insertions(+), 447 deletions(-) delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java delete mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index a74101922..1968c7cbf 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -b142b72bea6f30d8efb36dfa8c58e0d63ae5329b \ No newline at end of file +b953cae55554bf954eb006b9af961724048f3434 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 706329a62..13e10f33c 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1435,9 +1435,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java linguist-generated=true @@ -1508,10 +1505,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExper databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 96abd5fc1..22db10b2e 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -34,3 +34,4 @@ * Added `OIDC_FEDERATION` enum value for `com.databricks.sdk.service.sharing.AuthenticationType`. * [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.ConnectionInfo` to type `com.databricks.sdk.service.catalog.SecurableType` class. * [Breaking] Changed `catalogType` field for `com.databricks.sdk.service.catalog.SchemaInfo` to type `com.databricks.sdk.service.catalog.CatalogType` class. +* [Breaking] Removed `getCredentialsForTraceDataDownload()` and `getCredentialsForTraceDataUpload()` methods for `workspaceClient.experiments()` service. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index d4c066a69..70f12b841 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -796,6 +796,8 @@ public ExternalLocationsAPI externalLocations() { * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html */ public FilesAPI files() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java index 25f565e50..7bfecc4ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java @@ -26,6 +26,8 @@ * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html */ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java index b5103d010..791175943 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java @@ -21,6 +21,8 @@ * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html * *

This is the high-level interface, that contains generated methods. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java deleted file mode 100755 index 7f57da157..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class ArtifactCredentialInfo { - /** - * A collection of HTTP headers that should be specified when uploading to or downloading from the - * specified `signed_uri`. - */ - @JsonProperty("headers") - private Collection headers; - - /** - * The path, relative to the Run's artifact root location, of the artifact that can be accessed - * with the credential. - */ - @JsonProperty("path") - private String path; - - /** The ID of the MLflow Run containing the artifact that can be accessed with the credential. */ - @JsonProperty("run_id") - private String runId; - - /** The signed URI credential that provides access to the artifact. */ - @JsonProperty("signed_uri") - private String signedUri; - - /** - * The type of the signed credential URI (e.g., an AWS presigned URL or an Azure Shared Access - * Signature URI). 
- */ - @JsonProperty("type") - private ArtifactCredentialType typeValue; - - public ArtifactCredentialInfo setHeaders(Collection headers) { - this.headers = headers; - return this; - } - - public Collection getHeaders() { - return headers; - } - - public ArtifactCredentialInfo setPath(String path) { - this.path = path; - return this; - } - - public String getPath() { - return path; - } - - public ArtifactCredentialInfo setRunId(String runId) { - this.runId = runId; - return this; - } - - public String getRunId() { - return runId; - } - - public ArtifactCredentialInfo setSignedUri(String signedUri) { - this.signedUri = signedUri; - return this; - } - - public String getSignedUri() { - return signedUri; - } - - public ArtifactCredentialInfo setType(ArtifactCredentialType typeValue) { - this.typeValue = typeValue; - return this; - } - - public ArtifactCredentialType getType() { - return typeValue; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ArtifactCredentialInfo that = (ArtifactCredentialInfo) o; - return Objects.equals(headers, that.headers) - && Objects.equals(path, that.path) - && Objects.equals(runId, that.runId) - && Objects.equals(signedUri, that.signedUri) - && Objects.equals(typeValue, that.typeValue); - } - - @Override - public int hashCode() { - return Objects.hash(headers, path, runId, signedUri, typeValue); - } - - @Override - public String toString() { - return new ToStringer(ArtifactCredentialInfo.class) - .add("headers", headers) - .add("path", path) - .add("runId", runId) - .add("signedUri", signedUri) - .add("typeValue", typeValue) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java deleted file mode 100755 index 053a8991c..000000000 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java +++ /dev/null @@ -1,58 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class ArtifactCredentialInfoHttpHeader { - /** The HTTP header name. */ - @JsonProperty("name") - private String name; - - /** The HTTP header value. */ - @JsonProperty("value") - private String value; - - public ArtifactCredentialInfoHttpHeader setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public ArtifactCredentialInfoHttpHeader setValue(String value) { - this.value = value; - return this; - } - - public String getValue() { - return value; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ArtifactCredentialInfoHttpHeader that = (ArtifactCredentialInfoHttpHeader) o; - return Objects.equals(name, that.name) && Objects.equals(value, that.value); - } - - @Override - public int hashCode() { - return Objects.hash(name, value); - } - - @Override - public String toString() { - return new ToStringer(ArtifactCredentialInfoHttpHeader.class) - .add("name", name) - .add("value", value) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java deleted file mode 100755 index ec4cf4370..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java +++ /dev/null @@ -1,14 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; - -/** The type of a given artifact access credential */ -@Generated -public enum ArtifactCredentialType { - AWS_PRESIGNED_URL, - AZURE_ADLS_GEN2_SAS_URI, - AZURE_SAS_URI, - GCP_SIGNED_URL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index bcc631e68..33040a824 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -173,30 +173,6 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { return impl.getByName(request); } - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - String requestId) { - return getCredentialsForTraceDataDownload( - new GetCredentialsForTraceDataDownloadRequest().setRequestId(requestId)); - } - - /** Get credentials to download trace data. */ - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest request) { - return impl.getCredentialsForTraceDataDownload(request); - } - - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - String requestId) { - return getCredentialsForTraceDataUpload( - new GetCredentialsForTraceDataUploadRequest().setRequestId(requestId)); - } - - /** Get credentials to upload trace data. 
*/ - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest request) { - return impl.getCredentialsForTraceDataUpload(request); - } - public GetExperimentResponse getExperiment(String experimentId) { return getExperiment(new GetExperimentRequest().setExperimentId(experimentId)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index c228b7e72..4a06300fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -169,38 +169,6 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { } } - @Override - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest request) { - String path = - String.format( - "/api/2.0/mlflow/traces/%s/credentials-for-data-download", request.getRequestId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, GetCredentialsForTraceDataDownloadResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - @Override - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest request) { - String path = - String.format( - "/api/2.0/mlflow/traces/%s/credentials-for-data-upload", request.getRequestId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, GetCredentialsForTraceDataUploadResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " 
+ e.getMessage(), e); - } - } - @Override public GetExperimentResponse getExperiment(GetExperimentRequest request) { String path = "/api/2.0/mlflow/experiments/get"; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index abafed87e..06aae23be 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -96,14 +96,6 @@ FinalizeLoggedModelResponse finalizeLoggedModel( */ GetExperimentByNameResponse getByName(GetByNameRequest getByNameRequest); - /** Get credentials to download trace data. */ - GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest getCredentialsForTraceDataDownloadRequest); - - /** Get credentials to upload trace data. */ - GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest getCredentialsForTraceDataUploadRequest); - /** * Get an experiment. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java deleted file mode 100755 index 42aac217e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Get credentials to download trace data */ -@Generated -public class GetCredentialsForTraceDataDownloadRequest { - /** The ID of the trace to fetch artifact download credentials for. */ - @JsonIgnore private String requestId; - - public GetCredentialsForTraceDataDownloadRequest setRequestId(String requestId) { - this.requestId = requestId; - return this; - } - - public String getRequestId() { - return requestId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataDownloadRequest that = (GetCredentialsForTraceDataDownloadRequest) o; - return Objects.equals(requestId, that.requestId); - } - - @Override - public int hashCode() { - return Objects.hash(requestId); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataDownloadRequest.class) - .add("requestId", requestId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java deleted file mode 100755 index 839e04921..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java +++ /dev/null @@ -1,46 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class GetCredentialsForTraceDataDownloadResponse { - /** The artifact download credentials for the specified trace data. */ - @JsonProperty("credential_info") - private ArtifactCredentialInfo credentialInfo; - - public GetCredentialsForTraceDataDownloadResponse setCredentialInfo( - ArtifactCredentialInfo credentialInfo) { - this.credentialInfo = credentialInfo; - return this; - } - - public ArtifactCredentialInfo getCredentialInfo() { - return credentialInfo; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataDownloadResponse that = - (GetCredentialsForTraceDataDownloadResponse) o; - return Objects.equals(credentialInfo, that.credentialInfo); - } - - @Override - public int hashCode() { - return Objects.hash(credentialInfo); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataDownloadResponse.class) - .add("credentialInfo", credentialInfo) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java deleted file mode 100755 index e7c6d452c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Get credentials to upload trace data */ -@Generated -public class GetCredentialsForTraceDataUploadRequest { - /** The ID of the trace to fetch artifact upload credentials for. */ - @JsonIgnore private String requestId; - - public GetCredentialsForTraceDataUploadRequest setRequestId(String requestId) { - this.requestId = requestId; - return this; - } - - public String getRequestId() { - return requestId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataUploadRequest that = (GetCredentialsForTraceDataUploadRequest) o; - return Objects.equals(requestId, that.requestId); - } - - @Override - public int hashCode() { - return Objects.hash(requestId); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataUploadRequest.class) - .add("requestId", requestId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java deleted file mode 100755 index 9dcaed06c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java +++ /dev/null @@ -1,45 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class GetCredentialsForTraceDataUploadResponse { - /** The artifact upload credentials for the specified trace data. */ - @JsonProperty("credential_info") - private ArtifactCredentialInfo credentialInfo; - - public GetCredentialsForTraceDataUploadResponse setCredentialInfo( - ArtifactCredentialInfo credentialInfo) { - this.credentialInfo = credentialInfo; - return this; - } - - public ArtifactCredentialInfo getCredentialInfo() { - return credentialInfo; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataUploadResponse that = (GetCredentialsForTraceDataUploadResponse) o; - return Objects.equals(credentialInfo, that.credentialInfo); - } - - @Override - public int hashCode() { - return Objects.hash(credentialInfo); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataUploadResponse.class) - .add("credentialInfo", credentialInfo) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java index 604f034f7..243593e6a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java @@ -15,12 +15,7 @@ public class RunInputs { @JsonProperty("dataset_inputs") private Collection datasetInputs; - /** - * **NOTE**: Experimental: This API field may change or be removed in a future release without - * warning. - * - *

Model inputs to the Run. - */ + /** Model inputs to the Run. */ @JsonProperty("model_inputs") private Collection modelInputs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index b7d4c3be1..46e61fdc8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -48,16 +48,13 @@ private DataPlaneInfo dataPlaneInfoQuery(QueryEndpointInput request) { public QueryEndpointResponse query(QueryEndpointInput request) { DataPlaneInfo dataPlaneInfo = dataPlaneInfoQuery(request); String path = dataPlaneInfo.getEndpointUrl(); - Token token = - dataPlaneTokenSource.getToken( - dataPlaneInfo.getEndpointUrl(), dataPlaneInfo.getAuthorizationDetails()); + Token token = dataPlaneTokenSource.getToken(path, dataPlaneInfo.getAuthorizationDetails()); try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - RequestOptions options = new RequestOptions() .withAuthorization(token.getTokenType() + " " + token.getAccessToken()) From dadddd8b7b6b86acb67eb5a4c05c49c2e475aa83 Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Tue, 27 May 2025 14:55:25 +0000 Subject: [PATCH 30/31] Revert "Add latest generated code" This reverts commit ec211f3df365b7aef661ad98dbc9ab0452ccf18c. 
--- .codegen/_openapi_sha | 2 +- .gitattributes | 7 ++ NEXT_CHANGELOG.md | 1 - .../com/databricks/sdk/WorkspaceClient.java | 2 - .../sdk/service/files/FilesAPI.java | 2 - .../sdk/service/files/FilesService.java | 2 - .../service/ml/ArtifactCredentialInfo.java | 114 ++++++++++++++++++ .../ml/ArtifactCredentialInfoHttpHeader.java | 58 +++++++++ .../service/ml/ArtifactCredentialType.java | 14 +++ .../sdk/service/ml/ExperimentsAPI.java | 24 ++++ .../sdk/service/ml/ExperimentsImpl.java | 32 +++++ .../sdk/service/ml/ExperimentsService.java | 8 ++ ...redentialsForTraceDataDownloadRequest.java | 44 +++++++ ...edentialsForTraceDataDownloadResponse.java | 46 +++++++ ...tCredentialsForTraceDataUploadRequest.java | 44 +++++++ ...CredentialsForTraceDataUploadResponse.java | 45 +++++++ .../databricks/sdk/service/ml/RunInputs.java | 7 +- .../ServingEndpointsDataPlaneImpl.java | 5 +- 18 files changed, 447 insertions(+), 10 deletions(-) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 1968c7cbf..a74101922 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -b953cae55554bf954eb006b9af961724048f3434 \ 
No newline at end of file +b142b72bea6f30d8efb36dfa8c58e0d63ae5329b \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 13e10f33c..706329a62 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1435,6 +1435,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java linguist-generated=true @@ -1505,6 +1508,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExper databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 22db10b2e..96abd5fc1 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -34,4 +34,3 @@ * Added `OIDC_FEDERATION` enum value for `com.databricks.sdk.service.sharing.AuthenticationType`. * [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.ConnectionInfo` to type `com.databricks.sdk.service.catalog.SecurableType` class. * [Breaking] Changed `catalogType` field for `com.databricks.sdk.service.catalog.SchemaInfo` to type `com.databricks.sdk.service.catalog.CatalogType` class. -* [Breaking] Removed `getCredentialsForTraceDataDownload()` and `getCredentialsForTraceDataUpload()` methods for `workspaceClient.experiments()` service. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 70f12b841..d4c066a69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -796,8 +796,6 @@ public ExternalLocationsAPI externalLocations() { * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * - *

Use of Files API may incur Databricks data transfer charges. - * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html */ public FilesAPI files() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java index 7bfecc4ca..25f565e50 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java @@ -26,8 +26,6 @@ * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * - *

Use of Files API may incur Databricks data transfer charges. - * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html */ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java index 791175943..b5103d010 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java @@ -21,8 +21,6 @@ * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * - *

Use of Files API may incur Databricks data transfer charges. - * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html * *

This is the high-level interface, that contains generated methods. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java new file mode 100755 index 000000000..7f57da157 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java @@ -0,0 +1,114 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ArtifactCredentialInfo { + /** + * A collection of HTTP headers that should be specified when uploading to or downloading from the + * specified `signed_uri`. + */ + @JsonProperty("headers") + private Collection headers; + + /** + * The path, relative to the Run's artifact root location, of the artifact that can be accessed + * with the credential. + */ + @JsonProperty("path") + private String path; + + /** The ID of the MLflow Run containing the artifact that can be accessed with the credential. */ + @JsonProperty("run_id") + private String runId; + + /** The signed URI credential that provides access to the artifact. */ + @JsonProperty("signed_uri") + private String signedUri; + + /** + * The type of the signed credential URI (e.g., an AWS presigned URL or an Azure Shared Access + * Signature URI). 
+ */ + @JsonProperty("type") + private ArtifactCredentialType typeValue; + + public ArtifactCredentialInfo setHeaders(Collection headers) { + this.headers = headers; + return this; + } + + public Collection getHeaders() { + return headers; + } + + public ArtifactCredentialInfo setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public ArtifactCredentialInfo setRunId(String runId) { + this.runId = runId; + return this; + } + + public String getRunId() { + return runId; + } + + public ArtifactCredentialInfo setSignedUri(String signedUri) { + this.signedUri = signedUri; + return this; + } + + public String getSignedUri() { + return signedUri; + } + + public ArtifactCredentialInfo setType(ArtifactCredentialType typeValue) { + this.typeValue = typeValue; + return this; + } + + public ArtifactCredentialType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArtifactCredentialInfo that = (ArtifactCredentialInfo) o; + return Objects.equals(headers, that.headers) + && Objects.equals(path, that.path) + && Objects.equals(runId, that.runId) + && Objects.equals(signedUri, that.signedUri) + && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(headers, path, runId, signedUri, typeValue); + } + + @Override + public String toString() { + return new ToStringer(ArtifactCredentialInfo.class) + .add("headers", headers) + .add("path", path) + .add("runId", runId) + .add("signedUri", signedUri) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java new file mode 100755 index 000000000..053a8991c --- /dev/null +++ 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.ml;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** A single HTTP header (name/value pair) attached to an artifact credential request. */
@Generated
public class ArtifactCredentialInfoHttpHeader {
  /** The HTTP header name. */
  @JsonProperty("name")
  private String name;

  /** The HTTP header value. */
  @JsonProperty("value")
  private String value;

  public String getName() {
    return name;
  }

  public ArtifactCredentialInfoHttpHeader setName(String name) {
    this.name = name;
    return this;
  }

  public String getValue() {
    return value;
  }

  public ArtifactCredentialInfoHttpHeader setValue(String value) {
    this.value = value;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ArtifactCredentialInfoHttpHeader other = (ArtifactCredentialInfoHttpHeader) o;
    return Objects.equals(name, other.name) && Objects.equals(value, other.value);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, value);
  }

  @Override
  public String toString() {
    return new ToStringer(ArtifactCredentialInfoHttpHeader.class)
        .add("name", name)
        .add("value", value)
        .toString();
  }
}
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; + +/** The type of a given artifact access credential */ +@Generated +public enum ArtifactCredentialType { + AWS_PRESIGNED_URL, + AZURE_ADLS_GEN2_SAS_URI, + AZURE_SAS_URI, + GCP_SIGNED_URL, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index 33040a824..bcc631e68 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -173,6 +173,30 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { return impl.getByName(request); } + public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( + String requestId) { + return getCredentialsForTraceDataDownload( + new GetCredentialsForTraceDataDownloadRequest().setRequestId(requestId)); + } + + /** Get credentials to download trace data. */ + public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( + GetCredentialsForTraceDataDownloadRequest request) { + return impl.getCredentialsForTraceDataDownload(request); + } + + public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( + String requestId) { + return getCredentialsForTraceDataUpload( + new GetCredentialsForTraceDataUploadRequest().setRequestId(requestId)); + } + + /** Get credentials to upload trace data. 
*/ + public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( + GetCredentialsForTraceDataUploadRequest request) { + return impl.getCredentialsForTraceDataUpload(request); + } + public GetExperimentResponse getExperiment(String experimentId) { return getExperiment(new GetExperimentRequest().setExperimentId(experimentId)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index 4a06300fd..c228b7e72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -169,6 +169,38 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { } } + @Override + public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( + GetCredentialsForTraceDataDownloadRequest request) { + String path = + String.format( + "/api/2.0/mlflow/traces/%s/credentials-for-data-download", request.getRequestId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetCredentialsForTraceDataDownloadResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( + GetCredentialsForTraceDataUploadRequest request) { + String path = + String.format( + "/api/2.0/mlflow/traces/%s/credentials-for-data-upload", request.getRequestId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetCredentialsForTraceDataUploadResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " 
+ e.getMessage(), e); + } + } + @Override public GetExperimentResponse getExperiment(GetExperimentRequest request) { String path = "/api/2.0/mlflow/experiments/get"; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index 06aae23be..abafed87e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -96,6 +96,14 @@ FinalizeLoggedModelResponse finalizeLoggedModel( */ GetExperimentByNameResponse getByName(GetByNameRequest getByNameRequest); + /** Get credentials to download trace data. */ + GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( + GetCredentialsForTraceDataDownloadRequest getCredentialsForTraceDataDownloadRequest); + + /** Get credentials to upload trace data. */ + GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( + GetCredentialsForTraceDataUploadRequest getCredentialsForTraceDataUploadRequest); + /** * Get an experiment. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java new file mode 100755 index 000000000..42aac217e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.ml;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Get credentials to download trace data */
@Generated
public class GetCredentialsForTraceDataDownloadRequest {
  /** The ID of the trace to fetch artifact download credentials for. */
  @JsonIgnore private String requestId;

  public String getRequestId() {
    return requestId;
  }

  public GetCredentialsForTraceDataDownloadRequest setRequestId(String requestId) {
    this.requestId = requestId;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GetCredentialsForTraceDataDownloadRequest other = (GetCredentialsForTraceDataDownloadRequest) o;
    return Objects.equals(requestId, other.requestId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(requestId);
  }

  @Override
  public String toString() {
    return new ToStringer(GetCredentialsForTraceDataDownloadRequest.class)
        .add("requestId", requestId)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.ml;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Response wrapper carrying artifact download credentials for trace data. */
@Generated
public class GetCredentialsForTraceDataDownloadResponse {
  /** The artifact download credentials for the specified trace data. */
  @JsonProperty("credential_info")
  private ArtifactCredentialInfo credentialInfo;

  public ArtifactCredentialInfo getCredentialInfo() {
    return credentialInfo;
  }

  public GetCredentialsForTraceDataDownloadResponse setCredentialInfo(
      ArtifactCredentialInfo credentialInfo) {
    this.credentialInfo = credentialInfo;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GetCredentialsForTraceDataDownloadResponse other =
        (GetCredentialsForTraceDataDownloadResponse) o;
    return Objects.equals(credentialInfo, other.credentialInfo);
  }

  @Override
  public int hashCode() {
    return Objects.hash(credentialInfo);
  }

  @Override
  public String toString() {
    return new ToStringer(GetCredentialsForTraceDataDownloadResponse.class)
        .add("credentialInfo", credentialInfo)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.ml;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Get credentials to upload trace data */
@Generated
public class GetCredentialsForTraceDataUploadRequest {
  /** The ID of the trace to fetch artifact upload credentials for. */
  @JsonIgnore private String requestId;

  public String getRequestId() {
    return requestId;
  }

  public GetCredentialsForTraceDataUploadRequest setRequestId(String requestId) {
    this.requestId = requestId;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GetCredentialsForTraceDataUploadRequest other = (GetCredentialsForTraceDataUploadRequest) o;
    return Objects.equals(requestId, other.requestId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(requestId);
  }

  @Override
  public String toString() {
    return new ToStringer(GetCredentialsForTraceDataUploadRequest.class)
        .add("requestId", requestId)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.ml;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Response wrapper carrying artifact upload credentials for trace data. */
@Generated
public class GetCredentialsForTraceDataUploadResponse {
  /** The artifact upload credentials for the specified trace data. */
  @JsonProperty("credential_info")
  private ArtifactCredentialInfo credentialInfo;

  public ArtifactCredentialInfo getCredentialInfo() {
    return credentialInfo;
  }

  public GetCredentialsForTraceDataUploadResponse setCredentialInfo(
      ArtifactCredentialInfo credentialInfo) {
    this.credentialInfo = credentialInfo;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GetCredentialsForTraceDataUploadResponse other = (GetCredentialsForTraceDataUploadResponse) o;
    return Objects.equals(credentialInfo, other.credentialInfo);
  }

  @Override
  public int hashCode() {
    return Objects.hash(credentialInfo);
  }

  @Override
  public String toString() {
    return new ToStringer(GetCredentialsForTraceDataUploadResponse.class)
        .add("credentialInfo", credentialInfo)
        .toString();
  }
}

Model inputs to the Run. + */ @JsonProperty("model_inputs") private Collection modelInputs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index 46e61fdc8..b7d4c3be1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -48,13 +48,16 @@ private DataPlaneInfo dataPlaneInfoQuery(QueryEndpointInput request) { public QueryEndpointResponse query(QueryEndpointInput request) { DataPlaneInfo dataPlaneInfo = dataPlaneInfoQuery(request); String path = dataPlaneInfo.getEndpointUrl(); - Token token = dataPlaneTokenSource.getToken(path, dataPlaneInfo.getAuthorizationDetails()); + Token token = + dataPlaneTokenSource.getToken( + dataPlaneInfo.getEndpointUrl(), dataPlaneInfo.getAuthorizationDetails()); try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); + RequestOptions options = new RequestOptions() .withAuthorization(token.getTokenType() + " " + token.getAccessToken()) From 4ea2d8ffe6e15b2b5bdb302dc2d8022daaf82fbe Mon Sep 17 00:00:00 2001 From: emmyzhou-db Date: Tue, 27 May 2025 14:58:54 +0000 Subject: [PATCH 31/31] Revert API changes --- .../sdk/service/serving/ServingEndpointsDataPlaneImpl.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index b7d4c3be1..46e61fdc8 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -48,16 +48,13 @@ private DataPlaneInfo dataPlaneInfoQuery(QueryEndpointInput request) { public QueryEndpointResponse query(QueryEndpointInput request) { DataPlaneInfo dataPlaneInfo = dataPlaneInfoQuery(request); String path = dataPlaneInfo.getEndpointUrl(); - Token token = - dataPlaneTokenSource.getToken( - dataPlaneInfo.getEndpointUrl(), dataPlaneInfo.getAuthorizationDetails()); + Token token = dataPlaneTokenSource.getToken(path, dataPlaneInfo.getAuthorizationDetails()); try { Request req = new Request("POST", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - RequestOptions options = new RequestOptions() .withAuthorization(token.getTokenType() + " " + token.getAccessToken())