diff --git a/README.md b/README.md
index e0d46a4d..fc72f69b 100644
--- a/README.md
+++ b/README.md
@@ -444,7 +444,7 @@ with open("result.png", "wb") as f:
## Data Frames
-This SDK supports working with tabular data using popular Python data frame libraries. When an API endpoint returns data in Arrow IPC format, the response is wrapped in an `TableResponse` class that provides methods to convert to various data frame formats:
+This SDK supports working with tabular data using popular Python data frame libraries. When an API endpoint returns data in Arrow IPC or Parquet format, the response is wrapped in a `TableResponse` class that provides methods to convert to various data frame formats:
- `to_pyarrow()`: Converts to a PyArrow Table
- `to_pandas()`: Converts to a Pandas DataFrame
@@ -822,6 +822,11 @@ Namespace | Resource | Operation | HTTP request |
**MediaSets** | MediaSet | [**transform**](docs/v2/MediaSets/MediaSet.md#transform) | **POST** /v2/mediasets/{mediaSetRid}/items/{mediaItemRid}/transform |
**MediaSets** | MediaSet | [**upload**](docs/v2/MediaSets/MediaSet.md#upload) | **POST** /v2/mediasets/{mediaSetRid}/items |
**MediaSets** | MediaSet | [**upload_media**](docs/v2/MediaSets/MediaSet.md#upload_media) | **PUT** /v2/mediasets/media/upload |
+**Models** | LiveDeployment | [**transform_json**](docs/v2/Models/LiveDeployment.md#transform_json) | **POST** /v2/models/liveDeployments/{liveDeploymentRid}/transformJson |
+**Models** | Model | [**create**](docs/v2/Models/Model.md#create) | **POST** /v2/models |
+**Models** | Model | [**get**](docs/v2/Models/Model.md#get) | **GET** /v2/models/{modelRid} |
+**Models** | ModelVersion | [**get**](docs/v2/Models/ModelVersion.md#get) | **GET** /v2/models/{modelRid}/versions/{modelVersionRid} |
+**Models** | ModelVersion | [**list**](docs/v2/Models/ModelVersion.md#list) | **GET** /v2/models/{modelRid}/versions |
**Ontologies** | Action | [**apply**](docs/v2/Ontologies/Action.md#apply) | **POST** /v2/ontologies/{ontology}/actions/{action}/apply |
**Ontologies** | Action | [**apply_batch**](docs/v2/Ontologies/Action.md#apply_batch) | **POST** /v2/ontologies/{ontology}/actions/{action}/applyBatch |
**Ontologies** | ActionType | [**get**](docs/v2/Ontologies/ActionType.md#get) | **GET** /v2/ontologies/{ontology}/actionTypes/{actionType} |
@@ -1702,11 +1707,13 @@ Namespace | Name | Import |
**LanguageModels** | [AnthropicDocument](docs/v2/LanguageModels/models/AnthropicDocument.md) | `from foundry_sdk.v2.language_models.models import AnthropicDocument` |
**LanguageModels** | [AnthropicDocumentCitations](docs/v2/LanguageModels/models/AnthropicDocumentCitations.md) | `from foundry_sdk.v2.language_models.models import AnthropicDocumentCitations` |
**LanguageModels** | [AnthropicDocumentSource](docs/v2/LanguageModels/models/AnthropicDocumentSource.md) | `from foundry_sdk.v2.language_models.models import AnthropicDocumentSource` |
+**LanguageModels** | [AnthropicEffort](docs/v2/LanguageModels/models/AnthropicEffort.md) | `from foundry_sdk.v2.language_models.models import AnthropicEffort` |
**LanguageModels** | [AnthropicEnabledThinking](docs/v2/LanguageModels/models/AnthropicEnabledThinking.md) | `from foundry_sdk.v2.language_models.models import AnthropicEnabledThinking` |
**LanguageModels** | [AnthropicEphemeralCacheControl](docs/v2/LanguageModels/models/AnthropicEphemeralCacheControl.md) | `from foundry_sdk.v2.language_models.models import AnthropicEphemeralCacheControl` |
**LanguageModels** | [AnthropicImage](docs/v2/LanguageModels/models/AnthropicImage.md) | `from foundry_sdk.v2.language_models.models import AnthropicImage` |
**LanguageModels** | [AnthropicImageBase64Source](docs/v2/LanguageModels/models/AnthropicImageBase64Source.md) | `from foundry_sdk.v2.language_models.models import AnthropicImageBase64Source` |
**LanguageModels** | [AnthropicImageSource](docs/v2/LanguageModels/models/AnthropicImageSource.md) | `from foundry_sdk.v2.language_models.models import AnthropicImageSource` |
+**LanguageModels** | [AnthropicJsonSchemaOutputFormat](docs/v2/LanguageModels/models/AnthropicJsonSchemaOutputFormat.md) | `from foundry_sdk.v2.language_models.models import AnthropicJsonSchemaOutputFormat` |
**LanguageModels** | [AnthropicMediaType](docs/v2/LanguageModels/models/AnthropicMediaType.md) | `from foundry_sdk.v2.language_models.models import AnthropicMediaType` |
**LanguageModels** | [AnthropicMessage](docs/v2/LanguageModels/models/AnthropicMessage.md) | `from foundry_sdk.v2.language_models.models import AnthropicMessage` |
**LanguageModels** | [AnthropicMessageContent](docs/v2/LanguageModels/models/AnthropicMessageContent.md) | `from foundry_sdk.v2.language_models.models import AnthropicMessageContent` |
@@ -1714,6 +1721,8 @@ Namespace | Name | Import |
**LanguageModels** | [AnthropicMessagesRequest](docs/v2/LanguageModels/models/AnthropicMessagesRequest.md) | `from foundry_sdk.v2.language_models.models import AnthropicMessagesRequest` |
**LanguageModels** | [AnthropicMessagesResponse](docs/v2/LanguageModels/models/AnthropicMessagesResponse.md) | `from foundry_sdk.v2.language_models.models import AnthropicMessagesResponse` |
**LanguageModels** | [AnthropicNoneToolChoice](docs/v2/LanguageModels/models/AnthropicNoneToolChoice.md) | `from foundry_sdk.v2.language_models.models import AnthropicNoneToolChoice` |
+**LanguageModels** | [AnthropicOutputConfig](docs/v2/LanguageModels/models/AnthropicOutputConfig.md) | `from foundry_sdk.v2.language_models.models import AnthropicOutputConfig` |
+**LanguageModels** | [AnthropicOutputFormat](docs/v2/LanguageModels/models/AnthropicOutputFormat.md) | `from foundry_sdk.v2.language_models.models import AnthropicOutputFormat` |
**LanguageModels** | [AnthropicRedactedThinking](docs/v2/LanguageModels/models/AnthropicRedactedThinking.md) | `from foundry_sdk.v2.language_models.models import AnthropicRedactedThinking` |
**LanguageModels** | [AnthropicSystemMessage](docs/v2/LanguageModels/models/AnthropicSystemMessage.md) | `from foundry_sdk.v2.language_models.models import AnthropicSystemMessage` |
**LanguageModels** | [AnthropicText](docs/v2/LanguageModels/models/AnthropicText.md) | `from foundry_sdk.v2.language_models.models import AnthropicText` |
@@ -1931,18 +1940,45 @@ Namespace | Name | Import |
**MediaSets** | [WaveformOperation](docs/v2/MediaSets/models/WaveformOperation.md) | `from foundry_sdk.v2.media_sets.models import WaveformOperation` |
**MediaSets** | [WavEncodeFormat](docs/v2/MediaSets/models/WavEncodeFormat.md) | `from foundry_sdk.v2.media_sets.models import WavEncodeFormat` |
**MediaSets** | [WebpFormat](docs/v2/MediaSets/models/WebpFormat.md) | `from foundry_sdk.v2.media_sets.models import WebpFormat` |
+**Models** | [BooleanParameter](docs/v2/Models/models/BooleanParameter.md) | `from foundry_sdk.v2.models.models import BooleanParameter` |
**Models** | [ColumnTypeSpecId](docs/v2/Models/models/ColumnTypeSpecId.md) | `from foundry_sdk.v2.models.models import ColumnTypeSpecId` |
**Models** | [CreateModelRequest](docs/v2/Models/models/CreateModelRequest.md) | `from foundry_sdk.v2.models.models import CreateModelRequest` |
**Models** | [CreateModelStudioConfigVersionRequest](docs/v2/Models/models/CreateModelStudioConfigVersionRequest.md) | `from foundry_sdk.v2.models.models import CreateModelStudioConfigVersionRequest` |
**Models** | [CreateModelStudioRequest](docs/v2/Models/models/CreateModelStudioRequest.md) | `from foundry_sdk.v2.models.models import CreateModelStudioRequest` |
**Models** | [CreateModelVersionRequest](docs/v2/Models/models/CreateModelVersionRequest.md) | `from foundry_sdk.v2.models.models import CreateModelVersionRequest` |
**Models** | [DatasetInput](docs/v2/Models/models/DatasetInput.md) | `from foundry_sdk.v2.models.models import DatasetInput` |
+**Models** | [DatetimeParameter](docs/v2/Models/models/DatetimeParameter.md) | `from foundry_sdk.v2.models.models import DatetimeParameter` |
**Models** | [DillModelFiles](docs/v2/Models/models/DillModelFiles.md) | `from foundry_sdk.v2.models.models import DillModelFiles` |
+**Models** | [DoubleParameter](docs/v2/Models/models/DoubleParameter.md) | `from foundry_sdk.v2.models.models import DoubleParameter` |
+**Models** | [DoubleSeriesAggregations](docs/v2/Models/models/DoubleSeriesAggregations.md) | `from foundry_sdk.v2.models.models import DoubleSeriesAggregations` |
+**Models** | [DoubleSeriesV1](docs/v2/Models/models/DoubleSeriesV1.md) | `from foundry_sdk.v2.models.models import DoubleSeriesV1` |
+**Models** | [DoubleSeriesValueV1](docs/v2/Models/models/DoubleSeriesValueV1.md) | `from foundry_sdk.v2.models.models import DoubleSeriesValueV1` |
+**Models** | [EpochMillis](docs/v2/Models/models/EpochMillis.md) | `from foundry_sdk.v2.models.models import EpochMillis` |
+**Models** | [Experiment](docs/v2/Models/models/Experiment.md) | `from foundry_sdk.v2.models.models import Experiment` |
+**Models** | [ExperimentArtifactDetails](docs/v2/Models/models/ExperimentArtifactDetails.md) | `from foundry_sdk.v2.models.models import ExperimentArtifactDetails` |
+**Models** | [ExperimentArtifactMetadata](docs/v2/Models/models/ExperimentArtifactMetadata.md) | `from foundry_sdk.v2.models.models import ExperimentArtifactMetadata` |
+**Models** | [ExperimentArtifactName](docs/v2/Models/models/ExperimentArtifactName.md) | `from foundry_sdk.v2.models.models import ExperimentArtifactName` |
+**Models** | [ExperimentAuthoringSource](docs/v2/Models/models/ExperimentAuthoringSource.md) | `from foundry_sdk.v2.models.models import ExperimentAuthoringSource` |
+**Models** | [ExperimentBranch](docs/v2/Models/models/ExperimentBranch.md) | `from foundry_sdk.v2.models.models import ExperimentBranch` |
+**Models** | [ExperimentCodeWorkspaceSource](docs/v2/Models/models/ExperimentCodeWorkspaceSource.md) | `from foundry_sdk.v2.models.models import ExperimentCodeWorkspaceSource` |
+**Models** | [ExperimentName](docs/v2/Models/models/ExperimentName.md) | `from foundry_sdk.v2.models.models import ExperimentName` |
+**Models** | [ExperimentRid](docs/v2/Models/models/ExperimentRid.md) | `from foundry_sdk.v2.models.models import ExperimentRid` |
+**Models** | [ExperimentSdkSource](docs/v2/Models/models/ExperimentSdkSource.md) | `from foundry_sdk.v2.models.models import ExperimentSdkSource` |
+**Models** | [ExperimentSource](docs/v2/Models/models/ExperimentSource.md) | `from foundry_sdk.v2.models.models import ExperimentSource` |
+**Models** | [ExperimentStatus](docs/v2/Models/models/ExperimentStatus.md) | `from foundry_sdk.v2.models.models import ExperimentStatus` |
+**Models** | [ExperimentTagText](docs/v2/Models/models/ExperimentTagText.md) | `from foundry_sdk.v2.models.models import ExperimentTagText` |
+**Models** | [InconsistentArrayDimensionsError](docs/v2/Models/models/InconsistentArrayDimensionsError.md) | `from foundry_sdk.v2.models.models import InconsistentArrayDimensionsError` |
+**Models** | [InferenceInputErrorType](docs/v2/Models/models/InferenceInputErrorType.md) | `from foundry_sdk.v2.models.models import InferenceInputErrorType` |
**Models** | [InputAlias](docs/v2/Models/models/InputAlias.md) | `from foundry_sdk.v2.models.models import InputAlias` |
+**Models** | [IntegerParameter](docs/v2/Models/models/IntegerParameter.md) | `from foundry_sdk.v2.models.models import IntegerParameter` |
+**Models** | [InvalidArrayShapeError](docs/v2/Models/models/InvalidArrayShapeError.md) | `from foundry_sdk.v2.models.models import InvalidArrayShapeError` |
+**Models** | [InvalidMapFormatError](docs/v2/Models/models/InvalidMapFormatError.md) | `from foundry_sdk.v2.models.models import InvalidMapFormatError` |
+**Models** | [InvalidTabularFormatError](docs/v2/Models/models/InvalidTabularFormatError.md) | `from foundry_sdk.v2.models.models import InvalidTabularFormatError` |
**Models** | [ListModelStudioConfigVersionsResponse](docs/v2/Models/models/ListModelStudioConfigVersionsResponse.md) | `from foundry_sdk.v2.models.models import ListModelStudioConfigVersionsResponse` |
**Models** | [ListModelStudioRunsResponse](docs/v2/Models/models/ListModelStudioRunsResponse.md) | `from foundry_sdk.v2.models.models import ListModelStudioRunsResponse` |
**Models** | [ListModelStudioTrainersResponse](docs/v2/Models/models/ListModelStudioTrainersResponse.md) | `from foundry_sdk.v2.models.models import ListModelStudioTrainersResponse` |
**Models** | [ListModelVersionsResponse](docs/v2/Models/models/ListModelVersionsResponse.md) | `from foundry_sdk.v2.models.models import ListModelVersionsResponse` |
+**Models** | [LiveDeploymentRid](docs/v2/Models/models/LiveDeploymentRid.md) | `from foundry_sdk.v2.models.models import LiveDeploymentRid` |
**Models** | [Model](docs/v2/Models/models/Model.md) | `from foundry_sdk.v2.models.models import Model` |
**Models** | [ModelApi](docs/v2/Models/models/ModelApi.md) | `from foundry_sdk.v2.models.models import ModelApi` |
**Models** | [ModelApiAnyType](docs/v2/Models/models/ModelApiAnyType.md) | `from foundry_sdk.v2.models.models import ModelApiAnyType` |
@@ -1977,8 +2013,39 @@ Namespace | Name | Import |
**Models** | [ModelVersion](docs/v2/Models/models/ModelVersion.md) | `from foundry_sdk.v2.models.models import ModelVersion` |
**Models** | [ModelVersionRid](docs/v2/Models/models/ModelVersionRid.md) | `from foundry_sdk.v2.models.models import ModelVersionRid` |
**Models** | [OutputAlias](docs/v2/Models/models/OutputAlias.md) | `from foundry_sdk.v2.models.models import OutputAlias` |
+**Models** | [Parameter](docs/v2/Models/models/Parameter.md) | `from foundry_sdk.v2.models.models import Parameter` |
+**Models** | [ParameterName](docs/v2/Models/models/ParameterName.md) | `from foundry_sdk.v2.models.models import ParameterName` |
+**Models** | [ParameterValue](docs/v2/Models/models/ParameterValue.md) | `from foundry_sdk.v2.models.models import ParameterValue` |
+**Models** | [RequiredValueMissingError](docs/v2/Models/models/RequiredValueMissingError.md) | `from foundry_sdk.v2.models.models import RequiredValueMissingError` |
**Models** | [ResourceConfiguration](docs/v2/Models/models/ResourceConfiguration.md) | `from foundry_sdk.v2.models.models import ResourceConfiguration` |
**Models** | [RunId](docs/v2/Models/models/RunId.md) | `from foundry_sdk.v2.models.models import RunId` |
+**Models** | [SearchExperimentsAndFilter](docs/v2/Models/models/SearchExperimentsAndFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsAndFilter` |
+**Models** | [SearchExperimentsContainsFilter](docs/v2/Models/models/SearchExperimentsContainsFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsContainsFilter` |
+**Models** | [SearchExperimentsContainsFilterField](docs/v2/Models/models/SearchExperimentsContainsFilterField.md) | `from foundry_sdk.v2.models.models import SearchExperimentsContainsFilterField` |
+**Models** | [SearchExperimentsEqualsFilter](docs/v2/Models/models/SearchExperimentsEqualsFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsEqualsFilter` |
+**Models** | [SearchExperimentsEqualsFilterField](docs/v2/Models/models/SearchExperimentsEqualsFilterField.md) | `from foundry_sdk.v2.models.models import SearchExperimentsEqualsFilterField` |
+**Models** | [SearchExperimentsFilter](docs/v2/Models/models/SearchExperimentsFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsFilter` |
+**Models** | [SearchExperimentsFilterOperator](docs/v2/Models/models/SearchExperimentsFilterOperator.md) | `from foundry_sdk.v2.models.models import SearchExperimentsFilterOperator` |
+**Models** | [SearchExperimentsNotFilter](docs/v2/Models/models/SearchExperimentsNotFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsNotFilter` |
+**Models** | [SearchExperimentsOrderBy](docs/v2/Models/models/SearchExperimentsOrderBy.md) | `from foundry_sdk.v2.models.models import SearchExperimentsOrderBy` |
+**Models** | [SearchExperimentsOrderByField](docs/v2/Models/models/SearchExperimentsOrderByField.md) | `from foundry_sdk.v2.models.models import SearchExperimentsOrderByField` |
+**Models** | [SearchExperimentsOrFilter](docs/v2/Models/models/SearchExperimentsOrFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsOrFilter` |
+**Models** | [SearchExperimentsParameterFilter](docs/v2/Models/models/SearchExperimentsParameterFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsParameterFilter` |
+**Models** | [SearchExperimentsRequest](docs/v2/Models/models/SearchExperimentsRequest.md) | `from foundry_sdk.v2.models.models import SearchExperimentsRequest` |
+**Models** | [SearchExperimentsResponse](docs/v2/Models/models/SearchExperimentsResponse.md) | `from foundry_sdk.v2.models.models import SearchExperimentsResponse` |
+**Models** | [SearchExperimentsSeriesFilter](docs/v2/Models/models/SearchExperimentsSeriesFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsSeriesFilter` |
+**Models** | [SearchExperimentsSeriesFilterField](docs/v2/Models/models/SearchExperimentsSeriesFilterField.md) | `from foundry_sdk.v2.models.models import SearchExperimentsSeriesFilterField` |
+**Models** | [SearchExperimentsStartsWithFilter](docs/v2/Models/models/SearchExperimentsStartsWithFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsStartsWithFilter` |
+**Models** | [SearchExperimentsStartsWithFilterField](docs/v2/Models/models/SearchExperimentsStartsWithFilterField.md) | `from foundry_sdk.v2.models.models import SearchExperimentsStartsWithFilterField` |
+**Models** | [SearchExperimentsSummaryMetricFilter](docs/v2/Models/models/SearchExperimentsSummaryMetricFilter.md) | `from foundry_sdk.v2.models.models import SearchExperimentsSummaryMetricFilter` |
+**Models** | [Series](docs/v2/Models/models/Series.md) | `from foundry_sdk.v2.models.models import Series` |
+**Models** | [SeriesAggregations](docs/v2/Models/models/SeriesAggregations.md) | `from foundry_sdk.v2.models.models import SeriesAggregations` |
+**Models** | [SeriesAggregationsValue](docs/v2/Models/models/SeriesAggregationsValue.md) | `from foundry_sdk.v2.models.models import SeriesAggregationsValue` |
+**Models** | [SeriesName](docs/v2/Models/models/SeriesName.md) | `from foundry_sdk.v2.models.models import SeriesName` |
+**Models** | [StringParameter](docs/v2/Models/models/StringParameter.md) | `from foundry_sdk.v2.models.models import StringParameter` |
+**Models** | [SummaryMetric](docs/v2/Models/models/SummaryMetric.md) | `from foundry_sdk.v2.models.models import SummaryMetric` |
+**Models** | [SummaryMetricAggregation](docs/v2/Models/models/SummaryMetricAggregation.md) | `from foundry_sdk.v2.models.models import SummaryMetricAggregation` |
+**Models** | [TableArtifactDetails](docs/v2/Models/models/TableArtifactDetails.md) | `from foundry_sdk.v2.models.models import TableArtifactDetails` |
**Models** | [TrainerDescription](docs/v2/Models/models/TrainerDescription.md) | `from foundry_sdk.v2.models.models import TrainerDescription` |
**Models** | [TrainerId](docs/v2/Models/models/TrainerId.md) | `from foundry_sdk.v2.models.models import TrainerId` |
**Models** | [TrainerInputsSpecification](docs/v2/Models/models/TrainerInputsSpecification.md) | `from foundry_sdk.v2.models.models import TrainerInputsSpecification` |
@@ -1988,6 +2055,11 @@ Namespace | Name | Import |
**Models** | [TrainerType](docs/v2/Models/models/TrainerType.md) | `from foundry_sdk.v2.models.models import TrainerType` |
**Models** | [TrainerVersion](docs/v2/Models/models/TrainerVersion.md) | `from foundry_sdk.v2.models.models import TrainerVersion` |
**Models** | [TrainerVersionLocator](docs/v2/Models/models/TrainerVersionLocator.md) | `from foundry_sdk.v2.models.models import TrainerVersionLocator` |
+**Models** | [TransformJsonLiveDeploymentRequest](docs/v2/Models/models/TransformJsonLiveDeploymentRequest.md) | `from foundry_sdk.v2.models.models import TransformJsonLiveDeploymentRequest` |
+**Models** | [TransformLiveDeploymentResponse](docs/v2/Models/models/TransformLiveDeploymentResponse.md) | `from foundry_sdk.v2.models.models import TransformLiveDeploymentResponse` |
+**Models** | [TypeMismatchError](docs/v2/Models/models/TypeMismatchError.md) | `from foundry_sdk.v2.models.models import TypeMismatchError` |
+**Models** | [UnknownInputNameError](docs/v2/Models/models/UnknownInputNameError.md) | `from foundry_sdk.v2.models.models import UnknownInputNameError` |
+**Models** | [UnsupportedTypeError](docs/v2/Models/models/UnsupportedTypeError.md) | `from foundry_sdk.v2.models.models import UnsupportedTypeError` |
**Ontologies** | [AbsoluteTimeRange](docs/v2/Ontologies/models/AbsoluteTimeRange.md) | `from foundry_sdk.v2.ontologies.models import AbsoluteTimeRange` |
**Ontologies** | [AbsoluteValuePropertyExpression](docs/v2/Ontologies/models/AbsoluteValuePropertyExpression.md) | `from foundry_sdk.v2.ontologies.models import AbsoluteValuePropertyExpression` |
**Ontologies** | [ActionExecutionTime](docs/v2/Ontologies/models/ActionExecutionTime.md) | `from foundry_sdk.v2.ontologies.models import ActionExecutionTime` |
@@ -2146,6 +2218,9 @@ Namespace | Name | Import |
**Ontologies** | [FunctionVersion](docs/v2/Ontologies/models/FunctionVersion.md) | `from foundry_sdk.v2.ontologies.models import FunctionVersion` |
**Ontologies** | [FuzzyRule](docs/v2/Ontologies/models/FuzzyRule.md) | `from foundry_sdk.v2.ontologies.models import FuzzyRule` |
**Ontologies** | [FuzzyV2](docs/v2/Ontologies/models/FuzzyV2.md) | `from foundry_sdk.v2.ontologies.models import FuzzyV2` |
+**Ontologies** | [GeoJsonString](docs/v2/Ontologies/models/GeoJsonString.md) | `from foundry_sdk.v2.ontologies.models import GeoJsonString` |
+**Ontologies** | [GeoShapeV2Geometry](docs/v2/Ontologies/models/GeoShapeV2Geometry.md) | `from foundry_sdk.v2.ontologies.models import GeoShapeV2Geometry` |
+**Ontologies** | [GeoShapeV2Query](docs/v2/Ontologies/models/GeoShapeV2Query.md) | `from foundry_sdk.v2.ontologies.models import GeoShapeV2Query` |
**Ontologies** | [GeotemporalSeriesEntry](docs/v2/Ontologies/models/GeotemporalSeriesEntry.md) | `from foundry_sdk.v2.ontologies.models import GeotemporalSeriesEntry` |
**Ontologies** | [GeotimeSeriesValue](docs/v2/Ontologies/models/GeotimeSeriesValue.md) | `from foundry_sdk.v2.ontologies.models import GeotimeSeriesValue` |
**Ontologies** | [GetSelectedPropertyOperation](docs/v2/Ontologies/models/GetSelectedPropertyOperation.md) | `from foundry_sdk.v2.ontologies.models import GetSelectedPropertyOperation` |
@@ -2442,6 +2517,7 @@ Namespace | Name | Import |
**Ontologies** | [SharedPropertyType](docs/v2/Ontologies/models/SharedPropertyType.md) | `from foundry_sdk.v2.ontologies.models import SharedPropertyType` |
**Ontologies** | [SharedPropertyTypeApiName](docs/v2/Ontologies/models/SharedPropertyTypeApiName.md) | `from foundry_sdk.v2.ontologies.models import SharedPropertyTypeApiName` |
**Ontologies** | [SharedPropertyTypeRid](docs/v2/Ontologies/models/SharedPropertyTypeRid.md) | `from foundry_sdk.v2.ontologies.models import SharedPropertyTypeRid` |
+**Ontologies** | [SpatialFilterMode](docs/v2/Ontologies/models/SpatialFilterMode.md) | `from foundry_sdk.v2.ontologies.models import SpatialFilterMode` |
**Ontologies** | [StartsWithQuery](docs/v2/Ontologies/models/StartsWithQuery.md) | `from foundry_sdk.v2.ontologies.models import StartsWithQuery` |
**Ontologies** | [StaticArgument](docs/v2/Ontologies/models/StaticArgument.md) | `from foundry_sdk.v2.ontologies.models import StaticArgument` |
**Ontologies** | [StreamGeotemporalSeriesValuesRequest](docs/v2/Ontologies/models/StreamGeotemporalSeriesValuesRequest.md) | `from foundry_sdk.v2.ontologies.models import StreamGeotemporalSeriesValuesRequest` |
@@ -2729,6 +2805,7 @@ Namespace | Name | Import |
**Core** | [IntegerType](docs/v1/Core/models/IntegerType.md) | `from foundry_sdk.v1.core.models import IntegerType` |
**Core** | [LongType](docs/v1/Core/models/LongType.md) | `from foundry_sdk.v1.core.models import LongType` |
**Core** | [MarkingType](docs/v1/Core/models/MarkingType.md) | `from foundry_sdk.v1.core.models import MarkingType` |
+**Core** | [MediaReferenceType](docs/v1/Core/models/MediaReferenceType.md) | `from foundry_sdk.v1.core.models import MediaReferenceType` |
**Core** | [MediaType](docs/v1/Core/models/MediaType.md) | `from foundry_sdk.v1.core.models import MediaType` |
**Core** | [NullType](docs/v1/Core/models/NullType.md) | `from foundry_sdk.v1.core.models import NullType` |
**Core** | [OperationScope](docs/v1/Core/models/OperationScope.md) | `from foundry_sdk.v1.core.models import OperationScope` |
@@ -3309,16 +3386,30 @@ Namespace | Name | Import |
**Models** | CreateModelStudioConfigVersionPermissionDenied | `from foundry_sdk.v2.models.errors import CreateModelStudioConfigVersionPermissionDenied` |
**Models** | CreateModelStudioPermissionDenied | `from foundry_sdk.v2.models.errors import CreateModelStudioPermissionDenied` |
**Models** | CreateModelVersionPermissionDenied | `from foundry_sdk.v2.models.errors import CreateModelVersionPermissionDenied` |
+**Models** | ExperimentArtifactNotFound | `from foundry_sdk.v2.models.errors import ExperimentArtifactNotFound` |
+**Models** | ExperimentNotFound | `from foundry_sdk.v2.models.errors import ExperimentNotFound` |
+**Models** | ExperimentSeriesNotFound | `from foundry_sdk.v2.models.errors import ExperimentSeriesNotFound` |
+**Models** | InferenceFailure | `from foundry_sdk.v2.models.errors import InferenceFailure` |
+**Models** | InferenceInvalidInput | `from foundry_sdk.v2.models.errors import InferenceInvalidInput` |
+**Models** | InferenceTimeout | `from foundry_sdk.v2.models.errors import InferenceTimeout` |
**Models** | InvalidModelApi | `from foundry_sdk.v2.models.errors import InvalidModelApi` |
**Models** | InvalidModelStudioCreateRequest | `from foundry_sdk.v2.models.errors import InvalidModelStudioCreateRequest` |
+**Models** | JsonExperimentArtifactTablePermissionDenied | `from foundry_sdk.v2.models.errors import JsonExperimentArtifactTablePermissionDenied` |
+**Models** | JsonExperimentSeriesPermissionDenied | `from foundry_sdk.v2.models.errors import JsonExperimentSeriesPermissionDenied` |
**Models** | LatestModelStudioConfigVersionsPermissionDenied | `from foundry_sdk.v2.models.errors import LatestModelStudioConfigVersionsPermissionDenied` |
**Models** | LaunchModelStudioPermissionDenied | `from foundry_sdk.v2.models.errors import LaunchModelStudioPermissionDenied` |
+**Models** | LiveDeploymentNotFound | `from foundry_sdk.v2.models.errors import LiveDeploymentNotFound` |
+**Models** | ModelExperimentNotFound | `from foundry_sdk.v2.models.errors import ModelExperimentNotFound` |
**Models** | ModelNotFound | `from foundry_sdk.v2.models.errors import ModelNotFound` |
**Models** | ModelStudioConfigVersionNotFound | `from foundry_sdk.v2.models.errors import ModelStudioConfigVersionNotFound` |
**Models** | ModelStudioNotFound | `from foundry_sdk.v2.models.errors import ModelStudioNotFound` |
**Models** | ModelStudioTrainerNotFound | `from foundry_sdk.v2.models.errors import ModelStudioTrainerNotFound` |
**Models** | ModelVersionNotFound | `from foundry_sdk.v2.models.errors import ModelVersionNotFound` |
+**Models** | ParquetExperimentArtifactTablePermissionDenied | `from foundry_sdk.v2.models.errors import ParquetExperimentArtifactTablePermissionDenied` |
+**Models** | ParquetExperimentSeriesPermissionDenied | `from foundry_sdk.v2.models.errors import ParquetExperimentSeriesPermissionDenied` |
+**Models** | SearchExperimentsPermissionDenied | `from foundry_sdk.v2.models.errors import SearchExperimentsPermissionDenied` |
**Models** | TrainerNotFound | `from foundry_sdk.v2.models.errors import TrainerNotFound` |
+**Models** | TransformJsonLiveDeploymentPermissionDenied | `from foundry_sdk.v2.models.errors import TransformJsonLiveDeploymentPermissionDenied` |
**Ontologies** | ActionContainsDuplicateEdits | `from foundry_sdk.v2.ontologies.errors import ActionContainsDuplicateEdits` |
**Ontologies** | ActionEditedPropertiesNotFound | `from foundry_sdk.v2.ontologies.errors import ActionEditedPropertiesNotFound` |
**Ontologies** | ActionEditsReadOnlyEntity | `from foundry_sdk.v2.ontologies.errors import ActionEditsReadOnlyEntity` |
diff --git a/config.json b/config.json
index e2de44e9..69c4deed 100644
--- a/config.json
+++ b/config.json
@@ -204,9 +204,13 @@
"skipValidation": {
"Filesystem": ["ResourceType"]
},
- "tableOperations": [
+ "arrowTableOperations": [
"readTableDataset",
"getResultsSqlQuery"
+ ],
+ "parquetTableOperations": [
+ "parquetExperimentSeries",
+ "parquetExperimentArtifactTable"
]
}
},
diff --git a/docs-snippets-npm/package.json b/docs-snippets-npm/package.json
index 77329399..927f1374 100644
--- a/docs-snippets-npm/package.json
+++ b/docs-snippets-npm/package.json
@@ -24,7 +24,7 @@
"sls": {
"dependencies": {
"com.palantir.foundry.api:api-gateway": {
- "minVersion": "1.1483.0",
+ "minVersion": "1.1485.0",
"maxVersion": "1.x.x",
"optional": false
}
diff --git a/docs-snippets-npm/src/index.ts b/docs-snippets-npm/src/index.ts
index 3247a808..4c4a2e33 100644
--- a/docs-snippets-npm/src/index.ts
+++ b/docs-snippets-npm/src/index.ts
@@ -1095,7 +1095,7 @@ export const PYTHON_PLATFORM_SNIPPETS: SdkSnippets.media-item.`, where `` is the same as the instance part of the media set RID, and `` is a UUID. An `InvalidMediaItemRid` error will be thrown if the RID is not in the expected format. A `MediaItemRidAlreadyExists` error will be thrown if the media set already contains a media item with the same RID.\nmedia_item_rid = None\n# Optional[PreviewMode] | A boolean flag that, when set to true, enables the use of beta features in preview mode.\npreview = None\n\n\ntry:\n api_response = client.media_sets.MediaSet.upload_media(\n body,\n filename=filename,\n attribution=attribution,\n media_item_rid=media_item_rid,\n preview=preview,\n )\n print(\"The upload_media response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling MediaSet.upload_media: %s\\n\" % e)"
}
],
+ "v2.getExperiment": [
+ {
+ "template": "from foundry_sdk import FoundryClient\nimport foundry_sdk\nfrom pprint import pprint\n\nclient = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname=\"example.palantirfoundry.com\")\n\n# ModelRid\nmodel_rid = None\n# ExperimentRid\nexperiment_rid = None\n# Optional[PreviewMode] | Enables the use of preview functionality.\npreview = None\n\n\ntry:\n api_response = client.models.Model.Experiment.get(model_rid, experiment_rid, preview=preview)\n print(\"The get response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling Experiment.get: %s\\n\" % e)"
+ }
+ ],
+ "v2.searchExperiments": [
+ {
+ "template": "from foundry_sdk import FoundryClient\nimport foundry_sdk\nfrom pprint import pprint\n\nclient = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname=\"example.palantirfoundry.com\")\n\n# ModelRid\nmodel_rid = None\n# Optional[SearchExperimentsOrderBy] | The field to sort by. Default is to sort by relevance.\norder_by = {\"field\": \"EXPERIMENT_NAME\", \"direction\": \"ASC\"}\n# Optional[PageSize] | The maximum number of results to return. Default 50, maximum of 100.\npage_size = 100\n# Optional[PageToken] | PageToken to identify the next page to retrieve. Leave empty for the first request.\npage_token = \"v1.QnVpbGQgdGhlIEZ1dHVyZTogaHR0cHM6Ly93d3cucGFsYW50aXIuY29tL2NhcmVlcnMvP2xldmVyLXNvdXJjZSU1YiU1ZD1BUElEb2NzI29wZW4tcG9zaXRpb25z\"\n# Optional[PreviewMode] | Enables the use of preview functionality.\npreview = None\n# Optional[SearchExperimentsFilter] | Optional search filter for filtering experiments. If not provided, all experiments for the model are returned.\nwhere = None\n\n\ntry:\n api_response = client.models.Model.Experiment.search(\n model_rid,\n order_by=order_by,\n page_size=page_size,\n page_token=page_token,\n preview=preview,\n where=where,\n )\n print(\"The search response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling Experiment.search: %s\\n\" % e)"
+ }
+ ],
+ "v2.jsonExperimentArtifactTable": [
+ {
+ "template": "from foundry_sdk import FoundryClient\nimport foundry_sdk\nfrom pprint import pprint\n\nclient = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname=\"example.palantirfoundry.com\")\n\n# ModelRid\nmodel_rid = None\n# ExperimentRid\nexperiment_rid = None\n# ExperimentArtifactName\nexperiment_artifact_table_name = None\n# Optional[int] | Number of rows to skip from the beginning. Defaults to 0.\noffset = None\n# Optional[PageSize] | Maximum number of rows to return. Default is 10, maximum is 100.\npage_size = None\n# Optional[PreviewMode] | Enables the use of preview functionality.\npreview = None\n\n\ntry:\n api_response = client.models.Model.Experiment.ArtifactTable.json(\n model_rid,\n experiment_rid,\n experiment_artifact_table_name,\n offset=offset,\n page_size=page_size,\n preview=preview,\n )\n print(\"The json response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling ArtifactTable.json: %s\\n\" % e)"
+ }
+ ],
+ "v2.parquetExperimentArtifactTable": [
+ {
+ "template": "from foundry_sdk import FoundryClient\nimport foundry_sdk\nfrom pprint import pprint\n\nclient = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname=\"example.palantirfoundry.com\")\n\n# ModelRid\nmodel_rid = None\n# ExperimentRid\nexperiment_rid = None\n# ExperimentArtifactName\nexperiment_artifact_table_name = None\n# Optional[PreviewMode] | Enables the use of preview functionality.\npreview = None\n\n\ntry:\n api_response = client.models.Model.Experiment.ArtifactTable.parquet(\n model_rid, experiment_rid, experiment_artifact_table_name, preview=preview\n )\n print(\"The parquet response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling ArtifactTable.parquet: %s\\n\" % e)"
+ }
+ ],
+ "v2.jsonExperimentSeries": [
+ {
+ "template": "from foundry_sdk import FoundryClient\nimport foundry_sdk\nfrom pprint import pprint\n\nclient = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname=\"example.palantirfoundry.com\")\n\n# ModelRid\nmodel_rid = None\n# ExperimentRid\nexperiment_rid = None\n# SeriesName\nexperiment_series_name = None\n# Optional[int] | Number of values to skip from the beginning. Defaults to 0.\noffset = None\n# Optional[PageSize] | Maximum number of values to return per page. Default is 200, maximum is 1000.\npage_size = None\n# Optional[PreviewMode] | Enables the use of preview functionality.\npreview = None\n\n\ntry:\n api_response = client.models.Model.Experiment.Series.json(\n model_rid,\n experiment_rid,\n experiment_series_name,\n offset=offset,\n page_size=page_size,\n preview=preview,\n )\n print(\"The json response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling Series.json: %s\\n\" % e)"
+ }
+ ],
+ "v2.parquetExperimentSeries": [
+ {
+ "template": "from foundry_sdk import FoundryClient\nimport foundry_sdk\nfrom pprint import pprint\n\nclient = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname=\"example.palantirfoundry.com\")\n\n# ModelRid\nmodel_rid = None\n# ExperimentRid\nexperiment_rid = None\n# SeriesName\nexperiment_series_name = None\n# Optional[PreviewMode] | Enables the use of preview functionality.\npreview = None\n\n\ntry:\n api_response = client.models.Model.Experiment.Series.parquet(\n model_rid, experiment_rid, experiment_series_name, preview=preview\n )\n print(\"The parquet response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling Series.parquet: %s\\n\" % e)"
+ }
+ ],
+ "v2.transformJsonLiveDeployment": [
+ {
+ "template": "from foundry_sdk import FoundryClient\nimport foundry_sdk\nfrom pprint import pprint\n\nclient = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname=\"example.palantirfoundry.com\")\n\n# LiveDeploymentRid\nlive_deployment_rid = None\n# Dict[str, Any] | The input data for the model inference. The structure should match the model's transform API specification, where each key is an input name and the value is the corresponding input data.\ninput = {\"input_df\": [{\"feature_1\": 1.0, \"feature_2\": 2}]}\n# Optional[PreviewMode] | Enables the use of preview functionality.\npreview = None\n\n\ntry:\n api_response = client.models.LiveDeployment.transform_json(\n live_deployment_rid, input=input, preview=preview\n )\n print(\"The transform_json response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling LiveDeployment.transform_json: %s\\n\" % e)"
+ }
+ ],
"v2.createModel": [
{
"template": "from foundry_sdk import FoundryClient\nimport foundry_sdk\nfrom pprint import pprint\n\nclient = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname=\"example.palantirfoundry.com\")\n\n# ModelName\nname = \"House Pricing Model\"\n# FolderRid\nparent_folder_rid = \"ri.compass.main.folder.c410f510-2937-420e-8ea3-8c9bcb3c1791\"\n# Optional[PreviewMode] | Enables the use of preview functionality.\npreview = None\n\n\ntry:\n api_response = client.models.Model.create(\n name=name, parent_folder_rid=parent_folder_rid, preview=preview\n )\n print(\"The create response:\\n\")\n pprint(api_response)\nexcept foundry_sdk.PalantirRPCException as e:\n print(\"HTTP error when calling Model.create: %s\\n\" % e)"
@@ -1245,7 +1280,7 @@ export const PYTHON_PLATFORM_SNIPPETS: SdkSnippets [!TIP]
+> This operation returns tabular data that can be converted to data frame formats:
+>
+> ```python
+> # Get data in Parquet format
+> table_data = client.models.Model.Experiment.ArtifactTable.parquet(model_rid, experiment_rid, experiment_artifact_table_name, preview=preview)
+>
+> # Convert to a PyArrow Table
+> arrow_table = table_data.to_pyarrow()
+>
+> # Convert to a Pandas DataFrame
+> pandas_df = table_data.to_pandas()
+>
+> # Convert to a Polars DataFrame
+> polars_df = table_data.to_polars()
+>
+> # Convert to a DuckDB relation
+> duckdb_relation = table_data.to_duckdb()
+> ```
+>
+> For more details, see the [Data Frames section](../../../README.md#data-frames) in the README.
+
+### Example
+
+```python
+from foundry_sdk import FoundryClient
+import foundry_sdk
+from pprint import pprint
+
+client = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname="example.palantirfoundry.com")
+
+# ModelRid
+model_rid = None
+# ExperimentRid
+experiment_rid = None
+# ExperimentArtifactName
+experiment_artifact_table_name = None
+# Optional[PreviewMode] | Enables the use of preview functionality.
+preview = None
+
+
+try:
+ api_response = client.models.Model.Experiment.ArtifactTable.parquet(
+ model_rid, experiment_rid, experiment_artifact_table_name, preview=preview
+ )
+ print("The parquet response:\n")
+ pprint(api_response)
+except foundry_sdk.PalantirRPCException as e:
+ print("HTTP error when calling ArtifactTable.parquet: %s\n" % e)
+
+```
+
+
+
+### Authorization
+
+See [README](../../../README.md#authorization)
+
+### HTTP response details
+| Status Code | Type | Description | Content Type |
+|-------------|-------------|-------------|------------------|
+**200** | bytes | | application/octet-stream |
+
+[[Back to top]](#) [[Back to API list]](../../../README.md#apis-v2-link) [[Back to Model list]](../../../README.md#models-v2-link) [[Back to README]](../../../README.md)
+
diff --git a/docs/v2/Models/ExperimentSeries.md b/docs/v2/Models/ExperimentSeries.md
new file mode 100644
index 00000000..b0b444da
--- /dev/null
+++ b/docs/v2/Models/ExperimentSeries.md
@@ -0,0 +1,159 @@
+# ExperimentSeries
+
+Method | HTTP request | Release Stage |
+------------- | ------------- | ----- |
+[**json**](#json) | **GET** /v2/models/{modelRid}/experiments/{experimentRid}/series/{experimentSeriesName}/json | Private Beta |
+[**parquet**](#parquet) | **GET** /v2/models/{modelRid}/experiments/{experimentRid}/series/{experimentSeriesName}/parquet | Private Beta |
+
+# **json**
+Retrieve raw time-series data for a single series in JSON format.
+Results are paginated with a default page size of 200 and a maximum of 1000.
+
+
+### Parameters
+
+Name | Type | Description | Notes |
+------------- | ------------- | ------------- | ------------- |
+**model_rid** | ModelRid | | |
+**experiment_rid** | ExperimentRid | | |
+**experiment_series_name** | SeriesName | | |
+**offset** | Optional[int] | Number of values to skip from the beginning. Defaults to 0. | [optional] |
+**page_size** | Optional[PageSize] | Maximum number of values to return per page. Default is 200, maximum is 1000. | [optional] |
+**preview** | Optional[PreviewMode] | Enables the use of preview functionality. | [optional] |
+
+### Return type
+**Series**
+
+### Example
+
+```python
+from foundry_sdk import FoundryClient
+import foundry_sdk
+from pprint import pprint
+
+client = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname="example.palantirfoundry.com")
+
+# ModelRid
+model_rid = None
+# ExperimentRid
+experiment_rid = None
+# SeriesName
+experiment_series_name = None
+# Optional[int] | Number of values to skip from the beginning. Defaults to 0.
+offset = None
+# Optional[PageSize] | Maximum number of values to return per page. Default is 200, maximum is 1000.
+page_size = None
+# Optional[PreviewMode] | Enables the use of preview functionality.
+preview = None
+
+
+try:
+ api_response = client.models.Model.Experiment.Series.json(
+ model_rid,
+ experiment_rid,
+ experiment_series_name,
+ offset=offset,
+ page_size=page_size,
+ preview=preview,
+ )
+ print("The json response:\n")
+ pprint(api_response)
+except foundry_sdk.PalantirRPCException as e:
+ print("HTTP error when calling Series.json: %s\n" % e)
+
+```
+
+
+
+### Authorization
+
+See [README](../../../README.md#authorization)
+
+### HTTP response details
+| Status Code | Type | Description | Content Type |
+|-------------|-------------|-------------|------------------|
+**200** | Series | | application/json |
+
+[[Back to top]](#) [[Back to API list]](../../../README.md#apis-v2-link) [[Back to Model list]](../../../README.md#models-v2-link) [[Back to README]](../../../README.md)
+
+# **parquet**
+Retrieve raw time-series data for a single series as a streamed binary response in Apache Parquet format.
+
+
+### Parameters
+
+Name | Type | Description | Notes |
+------------- | ------------- | ------------- | ------------- |
+**model_rid** | ModelRid | | |
+**experiment_rid** | ExperimentRid | | |
+**experiment_series_name** | SeriesName | | |
+**preview** | Optional[PreviewMode] | Enables the use of preview functionality. | [optional] |
+
+### Return type
+**bytes**
+
+> [!TIP]
+> This operation returns tabular data that can be converted to data frame formats:
+>
+> ```python
+> # Get data in Parquet format
+> table_data = client.models.Model.Experiment.Series.parquet(model_rid, experiment_rid, experiment_series_name, preview=preview)
+>
+> # Convert to a PyArrow Table
+> arrow_table = table_data.to_pyarrow()
+>
+> # Convert to a Pandas DataFrame
+> pandas_df = table_data.to_pandas()
+>
+> # Convert to a Polars DataFrame
+> polars_df = table_data.to_polars()
+>
+> # Convert to a DuckDB relation
+> duckdb_relation = table_data.to_duckdb()
+> ```
+>
+> For more details, see the [Data Frames section](../../../README.md#data-frames) in the README.
+
+### Example
+
+```python
+from foundry_sdk import FoundryClient
+import foundry_sdk
+from pprint import pprint
+
+client = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname="example.palantirfoundry.com")
+
+# ModelRid
+model_rid = None
+# ExperimentRid
+experiment_rid = None
+# SeriesName
+experiment_series_name = None
+# Optional[PreviewMode] | Enables the use of preview functionality.
+preview = None
+
+
+try:
+ api_response = client.models.Model.Experiment.Series.parquet(
+ model_rid, experiment_rid, experiment_series_name, preview=preview
+ )
+ print("The parquet response:\n")
+ pprint(api_response)
+except foundry_sdk.PalantirRPCException as e:
+ print("HTTP error when calling Series.parquet: %s\n" % e)
+
+```
+
+
+
+### Authorization
+
+See [README](../../../README.md#authorization)
+
+### HTTP response details
+| Status Code | Type | Description | Content Type |
+|-------------|-------------|-------------|------------------|
+**200** | bytes | | application/octet-stream |
+
+[[Back to top]](#) [[Back to API list]](../../../README.md#apis-v2-link) [[Back to Model list]](../../../README.md#models-v2-link) [[Back to README]](../../../README.md)
+
diff --git a/docs/v2/Models/LiveDeployment.md b/docs/v2/Models/LiveDeployment.md
new file mode 100644
index 00000000..a6e313bc
--- /dev/null
+++ b/docs/v2/Models/LiveDeployment.md
@@ -0,0 +1,62 @@
+# LiveDeployment
+
+Method | HTTP request | Release Stage |
+------------- | ------------- | ----- |
+[**transform_json**](#transform_json) | **POST** /v2/models/liveDeployments/{liveDeploymentRid}/transformJson | Public Beta |
+
+# **transform_json**
+Performs inference on the live deployment.
+
+
+### Parameters
+
+Name | Type | Description | Notes |
+------------- | ------------- | ------------- | ------------- |
+**live_deployment_rid** | LiveDeploymentRid | | |
+**input** | Dict[str, Any] | The input data for the model inference. The structure should match the model's transform API specification, where each key is an input name and the value is the corresponding input data. | |
+**preview** | Optional[PreviewMode] | Enables the use of preview functionality. | [optional] |
+
+### Return type
+**TransformLiveDeploymentResponse**
+
+### Example
+
+```python
+from foundry_sdk import FoundryClient
+import foundry_sdk
+from pprint import pprint
+
+client = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname="example.palantirfoundry.com")
+
+# LiveDeploymentRid
+live_deployment_rid = None
+# Dict[str, Any] | The input data for the model inference. The structure should match the model's transform API specification, where each key is an input name and the value is the corresponding input data.
+input = {"input_df": [{"feature_1": 1.0, "feature_2": 2}]}
+# Optional[PreviewMode] | Enables the use of preview functionality.
+preview = None
+
+
+try:
+ api_response = client.models.LiveDeployment.transform_json(
+ live_deployment_rid, input=input, preview=preview
+ )
+ print("The transform_json response:\n")
+ pprint(api_response)
+except foundry_sdk.PalantirRPCException as e:
+ print("HTTP error when calling LiveDeployment.transform_json: %s\n" % e)
+
+```
+
+
+
+### Authorization
+
+See [README](../../../README.md#authorization)
+
+### HTTP response details
+| Status Code | Type | Description | Content Type |
+|-------------|-------------|-------------|------------------|
+**200** | TransformLiveDeploymentResponse | | application/json |
+
+[[Back to top]](#) [[Back to API list]](../../../README.md#apis-v2-link) [[Back to Model list]](../../../README.md#models-v2-link) [[Back to README]](../../../README.md)
+
diff --git a/docs/v2/Models/Model.md b/docs/v2/Models/Model.md
index 520562b8..ba642ed4 100644
--- a/docs/v2/Models/Model.md
+++ b/docs/v2/Models/Model.md
@@ -2,8 +2,8 @@
Method | HTTP request | Release Stage |
------------- | ------------- | ----- |
-[**create**](#create) | **POST** /v2/models | Private Beta |
-[**get**](#get) | **GET** /v2/models/{modelRid} | Private Beta |
+[**create**](#create) | **POST** /v2/models | Public Beta |
+[**get**](#get) | **GET** /v2/models/{modelRid} | Public Beta |
# **create**
Creates a new Model with no versions.
diff --git a/docs/v2/Models/ModelVersion.md b/docs/v2/Models/ModelVersion.md
index 14342693..c2715cdb 100644
--- a/docs/v2/Models/ModelVersion.md
+++ b/docs/v2/Models/ModelVersion.md
@@ -3,8 +3,8 @@
Method | HTTP request | Release Stage |
------------- | ------------- | ----- |
[**create**](#create) | **POST** /v2/models/{modelRid}/versions | Private Beta |
-[**get**](#get) | **GET** /v2/models/{modelRid}/versions/{modelVersionRid} | Private Beta |
-[**list**](#list) | **GET** /v2/models/{modelRid}/versions | Private Beta |
+[**get**](#get) | **GET** /v2/models/{modelRid}/versions/{modelVersionRid} | Public Beta |
+[**list**](#list) | **GET** /v2/models/{modelRid}/versions | Public Beta |
# **create**
Creates a new Model Version on an existing model.
@@ -35,9 +35,9 @@ client = FoundryClient(auth=foundry_sdk.UserTokenAuth(...), hostname="example.pa
# ModelRid
model_rid = None
# List[RID]
-backing_repositories = None
+backing_repositories = ["ri.stemma.main.repository.a1b2c3d4-e5f6-7890-abcd-ef1234567890"]
# List[str]
-conda_requirements = None
+conda_requirements = ["numpy==1.24.0", "pandas==2.0.0"]
# ModelApi
model_api = {
"inputs": [
diff --git a/docs/v2/Models/models/BooleanParameter.md b/docs/v2/Models/models/BooleanParameter.md
new file mode 100644
index 00000000..c734b1d9
--- /dev/null
+++ b/docs/v2/Models/models/BooleanParameter.md
@@ -0,0 +1,12 @@
+# BooleanParameter
+
+A boolean parameter value.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**value** | bool | Yes | |
+**type** | Literal["boolean"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/DatetimeParameter.md b/docs/v2/Models/models/DatetimeParameter.md
new file mode 100644
index 00000000..a5878836
--- /dev/null
+++ b/docs/v2/Models/models/DatetimeParameter.md
@@ -0,0 +1,12 @@
+# DatetimeParameter
+
+A datetime parameter value.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**value** | datetime | Yes | |
+**type** | Literal["datetime"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/DoubleParameter.md b/docs/v2/Models/models/DoubleParameter.md
new file mode 100644
index 00000000..5fe9d636
--- /dev/null
+++ b/docs/v2/Models/models/DoubleParameter.md
@@ -0,0 +1,12 @@
+# DoubleParameter
+
+A double parameter value.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**value** | float | Yes | |
+**type** | Literal["double"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/DoubleSeriesAggregations.md b/docs/v2/Models/models/DoubleSeriesAggregations.md
new file mode 100644
index 00000000..040c2211
--- /dev/null
+++ b/docs/v2/Models/models/DoubleSeriesAggregations.md
@@ -0,0 +1,14 @@
+# DoubleSeriesAggregations
+
+Aggregated statistics for numeric series.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**min** | float | Yes | Minimum value in the series |
+**max** | float | Yes | Maximum value in the series |
+**last** | float | Yes | Most recent value in the series |
+**type** | Literal["double"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/DoubleSeriesV1.md b/docs/v2/Models/models/DoubleSeriesV1.md
new file mode 100644
index 00000000..0163b11d
--- /dev/null
+++ b/docs/v2/Models/models/DoubleSeriesV1.md
@@ -0,0 +1,12 @@
+# DoubleSeriesV1
+
+A series of double values.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**series** | List[DoubleSeriesValueV1] | Yes | |
+**type** | Literal["doubleV1"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/DoubleSeriesValueV1.md b/docs/v2/Models/models/DoubleSeriesValueV1.md
new file mode 100644
index 00000000..a69572b6
--- /dev/null
+++ b/docs/v2/Models/models/DoubleSeriesValueV1.md
@@ -0,0 +1,13 @@
+# DoubleSeriesValueV1
+
+A single double value in a series.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**value** | float | Yes | |
+**timestamp** | EpochMillis | Yes | Milliseconds since unix time zero |
+**step** | Long | Yes | |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/EpochMillis.md b/docs/v2/Models/models/EpochMillis.md
new file mode 100644
index 00000000..42abb198
--- /dev/null
+++ b/docs/v2/Models/models/EpochMillis.md
@@ -0,0 +1,13 @@
+# EpochMillis
+
+Milliseconds since unix time zero. This representation is used to maintain consistency with the Parquet
+format.
+
+
+## Type
+```python
+Long
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/Experiment.md b/docs/v2/Models/models/Experiment.md
new file mode 100644
index 00000000..1327c0e9
--- /dev/null
+++ b/docs/v2/Models/models/Experiment.md
@@ -0,0 +1,26 @@
+# Experiment
+
+Experiment
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**rid** | ExperimentRid | Yes | |
+**model_rid** | ModelRid | Yes | |
+**name** | ExperimentName | Yes | |
+**created_at** | CreatedTime | Yes | |
+**created_by** | CreatedBy | Yes | |
+**source** | ExperimentSource | Yes | |
+**status** | ExperimentStatus | Yes | |
+**status_message** | Optional[str] | No | |
+**branch** | ExperimentBranch | Yes | |
+**parameters** | List[Parameter] | Yes | |
+**series** | List[SeriesAggregations] | Yes | |
+**summary_metrics** | List[SummaryMetric] | Yes | |
+**artifacts** | Dict[ExperimentArtifactName, ExperimentArtifactMetadata] | Yes | |
+**tags** | List[ExperimentTagText] | Yes | |
+**linked_model_version** | Optional[ModelVersionRid] | No | |
+**job_rid** | Optional[JobRid] | No | |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentArtifactDetails.md b/docs/v2/Models/models/ExperimentArtifactDetails.md
new file mode 100644
index 00000000..cedbdf83
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentArtifactDetails.md
@@ -0,0 +1,11 @@
+# ExperimentArtifactDetails
+
+Details about an experiment artifact.
+
+## Type
+```python
+TableArtifactDetails
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentArtifactMetadata.md b/docs/v2/Models/models/ExperimentArtifactMetadata.md
new file mode 100644
index 00000000..9ffc9927
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentArtifactMetadata.md
@@ -0,0 +1,14 @@
+# ExperimentArtifactMetadata
+
+Metadata about an experiment artifact.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**name** | ExperimentArtifactName | Yes | |
+**description** | Optional[str] | No | |
+**size_bytes** | SizeBytes | Yes | |
+**details** | ExperimentArtifactDetails | Yes | |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentArtifactName.md b/docs/v2/Models/models/ExperimentArtifactName.md
new file mode 100644
index 00000000..2389fd0a
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentArtifactName.md
@@ -0,0 +1,11 @@
+# ExperimentArtifactName
+
+The name of an experiment artifact.
+
+## Type
+```python
+str
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentAuthoringSource.md b/docs/v2/Models/models/ExperimentAuthoringSource.md
new file mode 100644
index 00000000..7bfc2dc7
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentAuthoringSource.md
@@ -0,0 +1,12 @@
+# ExperimentAuthoringSource
+
+Experiment created from an authoring repository.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**stemma_rid** | RID | Yes | |
+**type** | Literal["authoring"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentBranch.md b/docs/v2/Models/models/ExperimentBranch.md
new file mode 100644
index 00000000..606f44d8
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentBranch.md
@@ -0,0 +1,11 @@
+# ExperimentBranch
+
+ExperimentBranch
+
+## Type
+```python
+str
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentCodeWorkspaceSource.md b/docs/v2/Models/models/ExperimentCodeWorkspaceSource.md
new file mode 100644
index 00000000..462b0644
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentCodeWorkspaceSource.md
@@ -0,0 +1,13 @@
+# ExperimentCodeWorkspaceSource
+
+Experiment created from a code workspace.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**container_rid** | RID | Yes | |
+**deployment_rid** | Optional[RID] | No | |
+**type** | Literal["codeWorkspace"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentName.md b/docs/v2/Models/models/ExperimentName.md
new file mode 100644
index 00000000..3212b832
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentName.md
@@ -0,0 +1,11 @@
+# ExperimentName
+
+ExperimentName
+
+## Type
+```python
+str
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentRid.md b/docs/v2/Models/models/ExperimentRid.md
new file mode 100644
index 00000000..d945297e
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentRid.md
@@ -0,0 +1,11 @@
+# ExperimentRid
+
+The Resource Identifier (RID) of an Experiment.
+
+## Type
+```python
+RID
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentSdkSource.md b/docs/v2/Models/models/ExperimentSdkSource.md
new file mode 100644
index 00000000..d4f5908a
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentSdkSource.md
@@ -0,0 +1,11 @@
+# ExperimentSdkSource
+
+Experiment created from the SDK.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**type** | Literal["sdk"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentSource.md b/docs/v2/Models/models/ExperimentSource.md
new file mode 100644
index 00000000..5fa72bfe
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentSource.md
@@ -0,0 +1,17 @@
+# ExperimentSource
+
+The source from which the experiment was created.
+
+This is a discriminator type and does not contain any fields. Instead, it is a union
+of the models listed below.
+
+This discriminator class uses the `type` field to differentiate between classes.
+
+| Class | Value
+| ------------ | -------------
+ExperimentCodeWorkspaceSource | codeWorkspace
+ExperimentAuthoringSource | authoring
+ExperimentSdkSource | sdk
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentStatus.md b/docs/v2/Models/models/ExperimentStatus.md
new file mode 100644
index 00000000..73bd331f
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentStatus.md
@@ -0,0 +1,12 @@
+# ExperimentStatus
+
+The current status of an experiment.
+
+| **Value** |
+| --------- |
+| `"RUNNING"` |
+| `"SUCCEEDED"` |
+| `"FAILED"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ExperimentTagText.md b/docs/v2/Models/models/ExperimentTagText.md
new file mode 100644
index 00000000..bbae37fb
--- /dev/null
+++ b/docs/v2/Models/models/ExperimentTagText.md
@@ -0,0 +1,11 @@
+# ExperimentTagText
+
+A tag associated with an experiment.
+
+## Type
+```python
+str
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/InconsistentArrayDimensionsError.md b/docs/v2/Models/models/InconsistentArrayDimensionsError.md
new file mode 100644
index 00000000..f4560531
--- /dev/null
+++ b/docs/v2/Models/models/InconsistentArrayDimensionsError.md
@@ -0,0 +1,13 @@
+# InconsistentArrayDimensionsError
+
+Array elements have inconsistent dimensions.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**first_element_shape** | List[int] | Yes | The shape of the first array element |
+**conflicting_element_shape** | List[int] | Yes | The shape of the conflicting array element |
+**type** | Literal["inconsistentArrayDimensions"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/InferenceInputErrorType.md b/docs/v2/Models/models/InferenceInputErrorType.md
new file mode 100644
index 00000000..a92a2eaa
--- /dev/null
+++ b/docs/v2/Models/models/InferenceInputErrorType.md
@@ -0,0 +1,24 @@
+# InferenceInputErrorType
+
+The specific type and details of an input validation error for inference requests.
+Each variant carries parameters relevant to that specific error category.
+
+
+This is a discriminator type and does not contain any fields. Instead, it is a union
+of the models listed below.
+
+This discriminator class uses the `type` field to differentiate between classes.
+
+| Class | Value
+| ------------ | -------------
+InvalidArrayShapeError | invalidArrayShape
+TypeMismatchError | typeMismatch
+UnsupportedTypeError | unsupportedType
+UnknownInputNameError | unknownInputName
+InvalidTabularFormatError | invalidTabularFormat
+InconsistentArrayDimensionsError | inconsistentArrayDimensions
+RequiredValueMissingError | requiredValueMissing
+InvalidMapFormatError | invalidMapFormat
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/IntegerParameter.md b/docs/v2/Models/models/IntegerParameter.md
new file mode 100644
index 00000000..b259fdb8
--- /dev/null
+++ b/docs/v2/Models/models/IntegerParameter.md
@@ -0,0 +1,12 @@
+# IntegerParameter
+
+An integer parameter value.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**value** | Long | Yes | |
+**type** | Literal["integer"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/InvalidArrayShapeError.md b/docs/v2/Models/models/InvalidArrayShapeError.md
new file mode 100644
index 00000000..0e4fa2a8
--- /dev/null
+++ b/docs/v2/Models/models/InvalidArrayShapeError.md
@@ -0,0 +1,13 @@
+# InvalidArrayShapeError
+
+Array dimensions do not match expected ndarray shape.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**expected_shape** | List[int] | Yes | The expected array shape from the model API specification |
+**actual_shape** | Optional[List[int]] | No | The actual shape of the provided array |
+**type** | Literal["invalidArrayShape"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/InvalidMapFormatError.md b/docs/v2/Models/models/InvalidMapFormatError.md
new file mode 100644
index 00000000..f80e7b2d
--- /dev/null
+++ b/docs/v2/Models/models/InvalidMapFormatError.md
@@ -0,0 +1,11 @@
+# InvalidMapFormatError
+
+Map input has incorrect structure or null keys.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**type** | Literal["invalidMapFormat"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/InvalidTabularFormatError.md b/docs/v2/Models/models/InvalidTabularFormatError.md
new file mode 100644
index 00000000..9ada3e31
--- /dev/null
+++ b/docs/v2/Models/models/InvalidTabularFormatError.md
@@ -0,0 +1,12 @@
+# InvalidTabularFormatError
+
+Tabular input has incorrect JSON structure.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**input_field_name** | str | Yes | The name of the tabular input field with incorrect format |
+**type** | Literal["invalidTabularFormat"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/LiveDeploymentRid.md b/docs/v2/Models/models/LiveDeploymentRid.md
new file mode 100644
index 00000000..50a69a36
--- /dev/null
+++ b/docs/v2/Models/models/LiveDeploymentRid.md
@@ -0,0 +1,11 @@
+# LiveDeploymentRid
+
+The Resource Identifier (RID) of a Live Deployment.
+
+## Type
+```python
+RID
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/Parameter.md b/docs/v2/Models/models/Parameter.md
new file mode 100644
index 00000000..db831869
--- /dev/null
+++ b/docs/v2/Models/models/Parameter.md
@@ -0,0 +1,12 @@
+# Parameter
+
+A parameter with its name and value.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**name** | ParameterName | Yes | The parameter name |
+**value** | ParameterValue | Yes | The parameter value |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ParameterName.md b/docs/v2/Models/models/ParameterName.md
new file mode 100644
index 00000000..2aabb12c
--- /dev/null
+++ b/docs/v2/Models/models/ParameterName.md
@@ -0,0 +1,11 @@
+# ParameterName
+
+The name of an experiment parameter.
+
+## Type
+```python
+str
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/ParameterValue.md b/docs/v2/Models/models/ParameterValue.md
new file mode 100644
index 00000000..b26b286d
--- /dev/null
+++ b/docs/v2/Models/models/ParameterValue.md
@@ -0,0 +1,19 @@
+# ParameterValue
+
+A parameter value logged for an experiment.
+
+This is a discriminator type and does not contain any fields. Instead, it is a union
+of the models listed below.
+
+This discriminator class uses the `type` field to differentiate between classes.
+
+| Class | Value
+| ------------ | -------------
+DatetimeParameter | datetime
+BooleanParameter | boolean
+StringParameter | string
+DoubleParameter | double
+IntegerParameter | integer
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/RequiredValueMissingError.md b/docs/v2/Models/models/RequiredValueMissingError.md
new file mode 100644
index 00000000..0a89cf75
--- /dev/null
+++ b/docs/v2/Models/models/RequiredValueMissingError.md
@@ -0,0 +1,12 @@
+# RequiredValueMissingError
+
+Required input field is null or missing.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**field_name** | str | Yes | The name of the required field that was null or missing |
+**type** | Literal["requiredValueMissing"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsAndFilter.md b/docs/v2/Models/models/SearchExperimentsAndFilter.md
new file mode 100644
index 00000000..6f897b3b
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsAndFilter.md
@@ -0,0 +1,12 @@
+# SearchExperimentsAndFilter
+
+Returns experiments where every filter is satisfied.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**filters** | List[SearchExperimentsFilter] | Yes | |
+**type** | Literal["and"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsContainsFilter.md b/docs/v2/Models/models/SearchExperimentsContainsFilter.md
new file mode 100644
index 00000000..a2efe5ba
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsContainsFilter.md
@@ -0,0 +1,13 @@
+# SearchExperimentsContainsFilter
+
+Filter for substring containment matches.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**field** | SearchExperimentsContainsFilterField | Yes | |
+**value** | Any | Yes | |
+**type** | Literal["contains"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsContainsFilterField.md b/docs/v2/Models/models/SearchExperimentsContainsFilterField.md
new file mode 100644
index 00000000..88ca1aee
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsContainsFilterField.md
@@ -0,0 +1,12 @@
+# SearchExperimentsContainsFilterField
+
+Fields that support substring containment filtering.
+
+| **Value** |
+| --------- |
+| `"EXPERIMENT_NAME"` |
+| `"PARAMETER_NAME"` |
+| `"SERIES_NAME"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsEqualsFilter.md b/docs/v2/Models/models/SearchExperimentsEqualsFilter.md
new file mode 100644
index 00000000..64d917e9
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsEqualsFilter.md
@@ -0,0 +1,13 @@
+# SearchExperimentsEqualsFilter
+
+Filter for exact field value matches.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**field** | SearchExperimentsEqualsFilterField | Yes | |
+**value** | Any | Yes | |
+**type** | Literal["eq"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsEqualsFilterField.md b/docs/v2/Models/models/SearchExperimentsEqualsFilterField.md
new file mode 100644
index 00000000..223db777
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsEqualsFilterField.md
@@ -0,0 +1,17 @@
+# SearchExperimentsEqualsFilterField
+
+Fields that support equality filtering.
+
+| **Value** |
+| --------- |
+| `"STATUS"` |
+| `"BRANCH"` |
+| `"EXPERIMENT_NAME"` |
+| `"EXPERIMENT_RID"` |
+| `"JOB_RID"` |
+| `"TAG"` |
+| `"PARAMETER_NAME"` |
+| `"SERIES_NAME"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsFilter.md b/docs/v2/Models/models/SearchExperimentsFilter.md
new file mode 100644
index 00000000..8410efef
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsFilter.md
@@ -0,0 +1,35 @@
+# SearchExperimentsFilter
+
+Filter for searching experiments using operator-based composition.
+Supports equality, text matching, boolean combination operators, and compound filters
+that atomically bind a name to a value comparison.
+
+Example filters:
+- Simple status: {"eq": {"field": "STATUS", "value": "RUNNING"}}
+- Branch match: {"eq": {"field": "BRANCH", "value": "master"}}
+- Parameter filter: {"parameterFilter": {"parameterName": "learning_rate", "operator": "GT", "value": 0.01}}
+- Combined: {"and": {"filters": [
+ {"eq": {"field": "STATUS", "value": "SUCCEEDED"}},
+ {"parameterFilter": {"parameterName": "learning_rate", "operator": "GT", "value": 0.5}}
+ ]}}
+
+
+This is a discriminator type and does not contain any fields. Instead, it is a union
+of the models listed below.
+
+This discriminator class uses the `type` field to differentiate between classes.
+
+| Class | Value
+| ------------ | -------------
+SearchExperimentsSeriesFilter | seriesFilter
+SearchExperimentsContainsFilter | contains
+SearchExperimentsNotFilter | not
+SearchExperimentsOrFilter | or
+SearchExperimentsAndFilter | and
+SearchExperimentsParameterFilter | parameterFilter
+SearchExperimentsSummaryMetricFilter | summaryMetricFilter
+SearchExperimentsEqualsFilter | eq
+SearchExperimentsStartsWithFilter | startsWith
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsFilterOperator.md b/docs/v2/Models/models/SearchExperimentsFilterOperator.md
new file mode 100644
index 00000000..70f460c5
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsFilterOperator.md
@@ -0,0 +1,13 @@
+# SearchExperimentsFilterOperator
+
+Comparison operator for compound filter predicates.
+
+| **Value** |
+| --------- |
+| `"EQ"` |
+| `"GT"` |
+| `"LT"` |
+| `"CONTAINS"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsNotFilter.md b/docs/v2/Models/models/SearchExperimentsNotFilter.md
new file mode 100644
index 00000000..cf9a26b9
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsNotFilter.md
@@ -0,0 +1,12 @@
+# SearchExperimentsNotFilter
+
+Returns experiments where the filter is not satisfied.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**value** | SearchExperimentsFilter | Yes | |
+**type** | Literal["not"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsOrFilter.md b/docs/v2/Models/models/SearchExperimentsOrFilter.md
new file mode 100644
index 00000000..d609a8ef
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsOrFilter.md
@@ -0,0 +1,12 @@
+# SearchExperimentsOrFilter
+
+Returns experiments where at least one filter is satisfied.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**filters** | List[SearchExperimentsFilter] | Yes | |
+**type** | Literal["or"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsOrderBy.md b/docs/v2/Models/models/SearchExperimentsOrderBy.md
new file mode 100644
index 00000000..aa02f1e7
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsOrderBy.md
@@ -0,0 +1,12 @@
+# SearchExperimentsOrderBy
+
+Ordering configuration for experiment search results.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**field** | SearchExperimentsOrderByField | Yes | |
+**direction** | OrderByDirection | Yes | |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsOrderByField.md b/docs/v2/Models/models/SearchExperimentsOrderByField.md
new file mode 100644
index 00000000..b5874de1
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsOrderByField.md
@@ -0,0 +1,11 @@
+# SearchExperimentsOrderByField
+
+Fields to order experiment search results by.
+
+| **Value** |
+| --------- |
+| `"EXPERIMENT_NAME"` |
+| `"CREATED_AT"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsParameterFilter.md b/docs/v2/Models/models/SearchExperimentsParameterFilter.md
new file mode 100644
index 00000000..d5543e52
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsParameterFilter.md
@@ -0,0 +1,20 @@
+# SearchExperimentsParameterFilter
+
+Filter that atomically binds a parameter name to a value comparison,
+ensuring both conditions are evaluated on the same parameter.
+Supported combinations:
+- EQ: boolean, double, integer, datetime, or string value
+- GT/LT: double, integer, or datetime value
+- CONTAINS: string value (matches the parameter's string value)
+
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**parameter_name** | ParameterName | Yes | The exact name of the parameter to filter on. |
+**operator** | SearchExperimentsFilterOperator | Yes | The comparison operator to apply. |
+**value** | Any | Yes | The value to compare against. |
+**type** | Literal["parameterFilter"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsRequest.md b/docs/v2/Models/models/SearchExperimentsRequest.md
new file mode 100644
index 00000000..29572bfd
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsRequest.md
@@ -0,0 +1,14 @@
+# SearchExperimentsRequest
+
+SearchExperimentsRequest
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**where** | Optional[SearchExperimentsFilter] | No | Optional search filter for filtering experiments. If not provided, all experiments for the model are returned. |
+**order_by** | Optional[SearchExperimentsOrderBy] | No | The field to sort by. Default is to sort by relevance. |
+**page_size** | Optional[PageSize] | No | The maximum number of results to return. Default 50, maximum of 100. |
+**page_token** | Optional[PageToken] | No | PageToken to identify the next page to retrieve. Leave empty for the first request. |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsResponse.md b/docs/v2/Models/models/SearchExperimentsResponse.md
new file mode 100644
index 00000000..a18a7297
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsResponse.md
@@ -0,0 +1,12 @@
+# SearchExperimentsResponse
+
+Response from searching experiments.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**data** | List[Experiment] | Yes | List of experiments matching the search criteria. |
+**next_page_token** | Optional[PageToken] | No | Token for retrieving the next page of results, if more results are available. |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsSeriesFilter.md b/docs/v2/Models/models/SearchExperimentsSeriesFilter.md
new file mode 100644
index 00000000..006107ed
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsSeriesFilter.md
@@ -0,0 +1,17 @@
+# SearchExperimentsSeriesFilter
+
+Filter that atomically binds a series name to a metric comparison,
+ensuring all conditions are evaluated on the same series.
+
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**series_name** | SeriesName | Yes | The name of the series to filter on. |
+**field** | SearchExperimentsSeriesFilterField | Yes | The series metric to compare. |
+**operator** | SearchExperimentsFilterOperator | Yes | The comparison operator (EQ, GT, or LT). |
+**value** | Any | Yes | The value to compare against. |
+**type** | Literal["seriesFilter"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsSeriesFilterField.md b/docs/v2/Models/models/SearchExperimentsSeriesFilterField.md
new file mode 100644
index 00000000..267d7c9b
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsSeriesFilterField.md
@@ -0,0 +1,13 @@
+# SearchExperimentsSeriesFilterField
+
+The series metric to filter on.
+
+| **Value** |
+| --------- |
+| `"LENGTH"` |
+| `"AGGREGATION_MIN"` |
+| `"AGGREGATION_MAX"` |
+| `"AGGREGATION_LAST"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsStartsWithFilter.md b/docs/v2/Models/models/SearchExperimentsStartsWithFilter.md
new file mode 100644
index 00000000..ccdd9ae7
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsStartsWithFilter.md
@@ -0,0 +1,13 @@
+# SearchExperimentsStartsWithFilter
+
+Filter for prefix matches.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**field** | SearchExperimentsStartsWithFilterField | Yes | |
+**value** | Any | Yes | |
+**type** | Literal["startsWith"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsStartsWithFilterField.md b/docs/v2/Models/models/SearchExperimentsStartsWithFilterField.md
new file mode 100644
index 00000000..fdca6423
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsStartsWithFilterField.md
@@ -0,0 +1,12 @@
+# SearchExperimentsStartsWithFilterField
+
+Fields that support prefix filtering.
+
+| **Value** |
+| --------- |
+| `"EXPERIMENT_NAME"` |
+| `"PARAMETER_NAME"` |
+| `"SERIES_NAME"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SearchExperimentsSummaryMetricFilter.md b/docs/v2/Models/models/SearchExperimentsSummaryMetricFilter.md
new file mode 100644
index 00000000..6d7b37b0
--- /dev/null
+++ b/docs/v2/Models/models/SearchExperimentsSummaryMetricFilter.md
@@ -0,0 +1,17 @@
+# SearchExperimentsSummaryMetricFilter
+
+Filter that atomically binds a series name and aggregation type to a value comparison,
+ensuring all conditions are evaluated on the same summary metric.
+
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**series_name** | SeriesName | Yes | The name of the series this metric belongs to. |
+**aggregation** | SummaryMetricAggregation | Yes | The aggregation type (MIN, MAX, LAST). |
+**operator** | SearchExperimentsFilterOperator | Yes | The comparison operator (EQ, GT, or LT). |
+**value** | Any | Yes | The value to compare against. |
+**type** | Literal["summaryMetricFilter"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/Series.md b/docs/v2/Models/models/Series.md
new file mode 100644
index 00000000..c8ca8c11
--- /dev/null
+++ b/docs/v2/Models/models/Series.md
@@ -0,0 +1,11 @@
+# Series
+
+A series of values logged over time.
+
+## Type
+```python
+DoubleSeriesV1
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SeriesAggregations.md b/docs/v2/Models/models/SeriesAggregations.md
new file mode 100644
index 00000000..109c6ec4
--- /dev/null
+++ b/docs/v2/Models/models/SeriesAggregations.md
@@ -0,0 +1,13 @@
+# SeriesAggregations
+
+Series with precomputed aggregation values.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**name** | SeriesName | Yes | The series name |
+**length** | Long | Yes | Number of values in the series |
+**value** | SeriesAggregationsValue | Yes | Aggregated values for this series |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SeriesAggregationsValue.md b/docs/v2/Models/models/SeriesAggregationsValue.md
new file mode 100644
index 00000000..0b9ebdcc
--- /dev/null
+++ b/docs/v2/Models/models/SeriesAggregationsValue.md
@@ -0,0 +1,11 @@
+# SeriesAggregationsValue
+
+Union of aggregation values by series type.
+
+## Type
+```python
+DoubleSeriesAggregations
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SeriesName.md b/docs/v2/Models/models/SeriesName.md
new file mode 100644
index 00000000..d6d21da1
--- /dev/null
+++ b/docs/v2/Models/models/SeriesName.md
@@ -0,0 +1,11 @@
+# SeriesName
+
+The name of a series (metrics tracked over time).
+
+## Type
+```python
+str
+```
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/StringParameter.md b/docs/v2/Models/models/StringParameter.md
new file mode 100644
index 00000000..1ea3bb65
--- /dev/null
+++ b/docs/v2/Models/models/StringParameter.md
@@ -0,0 +1,12 @@
+# StringParameter
+
+A string parameter value.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**value** | str | Yes | |
+**type** | Literal["string"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SummaryMetric.md b/docs/v2/Models/models/SummaryMetric.md
new file mode 100644
index 00000000..2f609325
--- /dev/null
+++ b/docs/v2/Models/models/SummaryMetric.md
@@ -0,0 +1,13 @@
+# SummaryMetric
+
+A summary metric with series name, aggregation type, and computed value.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**series_name** | SeriesName | Yes | Name of the series this metric belongs to |
+**aggregation** | SummaryMetricAggregation | Yes | Type of aggregation (MIN, MAX, LAST) |
+**value** | float | Yes | The computed value |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/SummaryMetricAggregation.md b/docs/v2/Models/models/SummaryMetricAggregation.md
new file mode 100644
index 00000000..1324a78d
--- /dev/null
+++ b/docs/v2/Models/models/SummaryMetricAggregation.md
@@ -0,0 +1,12 @@
+# SummaryMetricAggregation
+
+The type of aggregation computed for a summary metric.
+
+| **Value** |
+| --------- |
+| `"MIN"` |
+| `"MAX"` |
+| `"LAST"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/TableArtifactDetails.md b/docs/v2/Models/models/TableArtifactDetails.md
new file mode 100644
index 00000000..2c9f650f
--- /dev/null
+++ b/docs/v2/Models/models/TableArtifactDetails.md
@@ -0,0 +1,12 @@
+# TableArtifactDetails
+
+Details about a table artifact.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**row_count** | Long | Yes | |
+**type** | Literal["table"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/TransformJsonLiveDeploymentRequest.md b/docs/v2/Models/models/TransformJsonLiveDeploymentRequest.md
new file mode 100644
index 00000000..12405c35
--- /dev/null
+++ b/docs/v2/Models/models/TransformJsonLiveDeploymentRequest.md
@@ -0,0 +1,11 @@
+# TransformJsonLiveDeploymentRequest
+
+TransformJsonLiveDeploymentRequest
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**input** | Dict[str, Any] | Yes | The input data for the model inference. The structure should match the model's transform API specification, where each key is an input name and the value is the corresponding input data. |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/TransformLiveDeploymentResponse.md b/docs/v2/Models/models/TransformLiveDeploymentResponse.md
new file mode 100644
index 00000000..1dec4129
--- /dev/null
+++ b/docs/v2/Models/models/TransformLiveDeploymentResponse.md
@@ -0,0 +1,11 @@
+# TransformLiveDeploymentResponse
+
+The response from transforming input data using a live deployment.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**output** | Dict[str, Any] | Yes | The output data from the model inference. The structure depends on the model's defined API specification, where each key is an output name and the value is the corresponding output data. |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/TypeMismatchError.md b/docs/v2/Models/models/TypeMismatchError.md
new file mode 100644
index 00000000..05820a2a
--- /dev/null
+++ b/docs/v2/Models/models/TypeMismatchError.md
@@ -0,0 +1,13 @@
+# TypeMismatchError
+
+Input type does not match expected type in model API.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**expected_type** | str | Yes | The expected type from the model API specification |
+**actual_type** | str | Yes | The actual type provided in the input |
+**type** | Literal["typeMismatch"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/UnknownInputNameError.md b/docs/v2/Models/models/UnknownInputNameError.md
new file mode 100644
index 00000000..2b248c0a
--- /dev/null
+++ b/docs/v2/Models/models/UnknownInputNameError.md
@@ -0,0 +1,12 @@
+# UnknownInputNameError
+
+Provided input name not found in model API specification.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**input_name** | str | Yes | The input name that was not found in the model API specification |
+**type** | Literal["unknownInputName"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Models/models/UnsupportedTypeError.md b/docs/v2/Models/models/UnsupportedTypeError.md
new file mode 100644
index 00000000..f969db2a
--- /dev/null
+++ b/docs/v2/Models/models/UnsupportedTypeError.md
@@ -0,0 +1,12 @@
+# UnsupportedTypeError
+
+Input contains an unsupported data type.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**unsupported_type** | str | Yes | The unsupported data type |
+**type** | Literal["unsupportedType"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Ontologies/models/BoundingBoxValue.md b/docs/v2/Ontologies/models/BoundingBoxValue.md
index 4aaf5ddb..bde66177 100644
--- a/docs/v2/Ontologies/models/BoundingBoxValue.md
+++ b/docs/v2/Ontologies/models/BoundingBoxValue.md
@@ -8,6 +8,7 @@ The top left and bottom right coordinate points that make up the bounding box.
| ------------ | ------------- | ------------- | ------------- |
**top_left** | WithinBoundingBoxPoint | Yes | |
**bottom_right** | WithinBoundingBoxPoint | Yes | |
+**type** | Literal["envelope"] | Yes | None |
[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Ontologies/models/ContainsAllTermsQuery.md b/docs/v2/Ontologies/models/ContainsAllTermsQuery.md
index 4e43def7..407109ff 100644
--- a/docs/v2/Ontologies/models/ContainsAllTermsQuery.md
+++ b/docs/v2/Ontologies/models/ContainsAllTermsQuery.md
@@ -1,7 +1,7 @@
# ContainsAllTermsQuery
Returns objects where the specified field contains all of the whitespace separated words in any
-order in the provided value. This query supports fuzzy matching. Allows you to specify a property to query on
+order in the provided value. This query supports fuzzy matching. Allows you to specify a property to query on
by a variety of means. Either `field` or `propertyIdentifier` must be supplied, but not both.
diff --git a/docs/v2/Ontologies/models/GeoJsonString.md b/docs/v2/Ontologies/models/GeoJsonString.md
new file mode 100644
index 00000000..d1d9a981
--- /dev/null
+++ b/docs/v2/Ontologies/models/GeoJsonString.md
@@ -0,0 +1,12 @@
+# GeoJsonString
+
+A GeoJSON geometry specification.
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**geo_json** | str | Yes | A GeoJSON geometry string. Supported geometry types include Point, MultiPoint, LineString, MultiLineString, Polygon, MultiPolygon, and GeometryCollection. |
+**type** | Literal["geoJson"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Ontologies/models/GeoShapeV2Geometry.md b/docs/v2/Ontologies/models/GeoShapeV2Geometry.md
new file mode 100644
index 00000000..935e12eb
--- /dev/null
+++ b/docs/v2/Ontologies/models/GeoShapeV2Geometry.md
@@ -0,0 +1,17 @@
+# GeoShapeV2Geometry
+
+Geometry specification for a GeoShapeV2Query. Supports bounding box envelopes and arbitrary GeoJSON geometries.
+
+
+This is a discriminator type and does not contain any fields. Instead, it is a union
+of the models listed below.
+
+This discriminator class uses the `type` field to differentiate between classes.
+
+| Class | Value
+| ------------ | -------------
+BoundingBoxValue | envelope
+GeoJsonString | geoJson
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Ontologies/models/GeoShapeV2Query.md b/docs/v2/Ontologies/models/GeoShapeV2Query.md
new file mode 100644
index 00000000..2f73a3ff
--- /dev/null
+++ b/docs/v2/Ontologies/models/GeoShapeV2Query.md
@@ -0,0 +1,18 @@
+# GeoShapeV2Query
+
+Returns objects where the specified field satisfies the provided geometry query with the given spatial operator.
+Supports both envelope (bounding box) and GeoJSON geometries for filtering geopoint or geoshape properties.
+Either `field` or `propertyIdentifier` can be supplied, but not both.
+
+
+## Properties
+| Name | Type | Required | Description |
+| ------------ | ------------- | ------------- | ------------- |
+**field** | Optional[PropertyApiName] | No | |
+**property_identifier** | Optional[PropertyIdentifier] | No | |
+**geometry** | GeoShapeV2Geometry | Yes | |
+**spatial_filter_mode** | SpatialFilterMode | Yes | |
+**type** | Literal["geoShapeV2"] | Yes | None |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/docs/v2/Ontologies/models/OntologyDataType.md b/docs/v2/Ontologies/models/OntologyDataType.md
index fa382d60..15f8e483 100644
--- a/docs/v2/Ontologies/models/OntologyDataType.md
+++ b/docs/v2/Ontologies/models/OntologyDataType.md
@@ -24,6 +24,7 @@ BooleanType | boolean
CipherTextType | cipherText
MarkingType | marking
UnsupportedType | unsupported
+MediaReferenceType | mediaReference
OntologyArrayType | array
OntologyObjectSetType | objectSet
BinaryType | binary
diff --git a/docs/v2/Ontologies/models/QueryDataType.md b/docs/v2/Ontologies/models/QueryDataType.md
index 3877d583..e6c868ba 100644
--- a/docs/v2/Ontologies/models/QueryDataType.md
+++ b/docs/v2/Ontologies/models/QueryDataType.md
@@ -25,6 +25,7 @@ LongType | long
BooleanType | boolean
UnsupportedType | unsupported
AttachmentType | attachment
+MediaReferenceType | mediaReference
NullType | null
QueryArrayType | array
OntologyObjectSetType | objectSet
diff --git a/docs/v2/Ontologies/models/SearchJsonQueryV2.md b/docs/v2/Ontologies/models/SearchJsonQueryV2.md
index 40876afb..7c374cd8 100644
--- a/docs/v2/Ontologies/models/SearchJsonQueryV2.md
+++ b/docs/v2/Ontologies/models/SearchJsonQueryV2.md
@@ -35,6 +35,7 @@ RegexQuery | regex
IsNullQueryV2 | isNull
ContainsAnyTermQuery | containsAnyTerm
IntervalQuery | interval
+GeoShapeV2Query | geoShapeV2
StartsWithQuery | startsWith
diff --git a/docs/v2/Ontologies/models/SpatialFilterMode.md b/docs/v2/Ontologies/models/SpatialFilterMode.md
new file mode 100644
index 00000000..5c5c0bc1
--- /dev/null
+++ b/docs/v2/Ontologies/models/SpatialFilterMode.md
@@ -0,0 +1,16 @@
+# SpatialFilterMode
+
+The spatial relation operator for a GeoShapeV2Query. INTERSECTS matches objects that intersect the provided
+geometry, DISJOINT matches objects that do not intersect the provided geometry, WITHIN matches objects that
+lie within the provided geometry, and CONTAINS matches objects that contain the provided geometry.
+
+
+| **Value** |
+| --------- |
+| `"INTERSECTS"` |
+| `"DISJOINT"` |
+| `"WITHIN"` |
+| `"CONTAINS"` |
+
+
+[[Back to Model list]](../../../../README.md#models-v2-link) [[Back to API list]](../../../../README.md#apis-v2-link) [[Back to README]](../../../../README.md)
diff --git a/foundry_sdk/__init__.py b/foundry_sdk/__init__.py
index 6eb7b29d..e1cbdd7f 100644
--- a/foundry_sdk/__init__.py
+++ b/foundry_sdk/__init__.py
@@ -14,6 +14,7 @@
from foundry_sdk._core import ApiResponse
+from foundry_sdk._core import ArrowTableResponse
from foundry_sdk._core import AsyncApiResponse
from foundry_sdk._core import AsyncPageIterator
from foundry_sdk._core import AsyncResourceIterator
@@ -21,6 +22,7 @@
from foundry_sdk._core import ConfidentialClientAuth
from foundry_sdk._core import Config
from foundry_sdk._core import PageIterator
+from foundry_sdk._core import ParquetTableResponse
from foundry_sdk._core import PublicClientAuth
from foundry_sdk._core import ResourceIterator
from foundry_sdk._core import StreamedApiResponse
diff --git a/foundry_sdk/_core/__init__.py b/foundry_sdk/_core/__init__.py
index a44b536e..0146a293 100644
--- a/foundry_sdk/_core/__init__.py
+++ b/foundry_sdk/_core/__init__.py
@@ -43,6 +43,8 @@
) # NOQA
from foundry_sdk._core.resource_iterator import PageIterator as PageIterator
from foundry_sdk._core.resource_iterator import ResourceIterator as ResourceIterator
+from foundry_sdk._core.table import ArrowTableResponse as ArrowTableResponse
+from foundry_sdk._core.table import ParquetTableResponse as ParquetTableResponse
from foundry_sdk._core.table import TableResponse as TableResponse
from foundry_sdk._core.user_token_auth_client import UserTokenAuth as UserTokenAuth
from foundry_sdk._core.utils import RID as RID
diff --git a/foundry_sdk/_core/api_client.py b/foundry_sdk/_core/api_client.py
index 6b490b80..3fd15629 100644
--- a/foundry_sdk/_core/api_client.py
+++ b/foundry_sdk/_core/api_client.py
@@ -61,7 +61,8 @@
from foundry_sdk._core.http_client import HttpClient
from foundry_sdk._core.resource_iterator import AsyncResourceIterator
from foundry_sdk._core.resource_iterator import ResourceIterator
-from foundry_sdk._core.table import TableResponse
+from foundry_sdk._core.table import ArrowTableResponse
+from foundry_sdk._core.table import ParquetTableResponse
from foundry_sdk._core.utils import assert_non_empty_string
from foundry_sdk._errors import ApiNotFoundError
from foundry_sdk._errors import BadRequestError
@@ -162,7 +163,7 @@ def async_with_streaming_response(
)
-ResponseMode = Literal["DECODED", "ITERATOR", "RAW", "STREAMING", "TABLE"]
+ResponseMode = Literal["DECODED", "ITERATOR", "RAW", "STREAMING", "ARROW_TABLE", "PARQUET_TABLE"]
# The SdkInternal dictionary is a flexible way to pass additional information to the API client
@@ -715,11 +716,16 @@ def make_request(token: Token):
if response_mode == "STREAMING":
return StreamingContextManager(request_info, api_response)
- elif response_mode == "TABLE":
+ elif response_mode == "ARROW_TABLE":
if res.content == b"":
return None
else:
- return TableResponse(res.content)
+ return ArrowTableResponse(res.content)
+ elif response_mode == "PARQUET_TABLE":
+ if res.content == b"":
+ return None
+ else:
+ return ParquetTableResponse(res.content)
elif response_mode == "RAW":
return api_response
else:
@@ -813,11 +819,16 @@ async def make_request(token: Token):
if response_mode == "RAW" or response_mode == "STREAMING":
return api_response
- elif response_mode == "TABLE":
+ elif response_mode == "ARROW_TABLE":
+ if res.content == b"":
+ return None
+ else:
+ return ArrowTableResponse(res.content)
+ elif response_mode == "PARQUET_TABLE":
if res.content == b"":
return None
else:
- return TableResponse(res.content)
+ return ParquetTableResponse(res.content)
else:
return api_response.decode()
diff --git a/foundry_sdk/_core/table.py b/foundry_sdk/_core/table.py
index a51a4e10..0a2cd57a 100644
--- a/foundry_sdk/_core/table.py
+++ b/foundry_sdk/_core/table.py
@@ -69,6 +69,13 @@ def to_duckdb(self) -> "duckdb.DuckDBPyRelation":
except ImportError:
raise ImportError(_error_msg("duckdb", "a DuckDB relation", "duckdb"))
+ def _get_arrow_table(self, extra_dependency: str) -> "pa.Table":
+ raise NotImplementedError("Subclasses must implement _get_arrow_table")
+
+
+class ArrowTableResponse(TableResponse):
+ """Deserializes an Arrow IPC stream into various formats."""
+
def _get_arrow_table(self, extra_dependency: str) -> "pa.Table":
try:
import pyarrow as pa
@@ -79,3 +86,19 @@ def _get_arrow_table(self, extra_dependency: str) -> "pa.Table":
self._arrow_table = pa.ipc.open_stream(self).read_all()
return self._arrow_table
+
+
+class ParquetTableResponse(TableResponse):
+ """Deserializes Parquet bytes into various formats."""
+
+ def _get_arrow_table(self, extra_dependency: str) -> "pa.Table":
+ try:
+ import pyarrow as pa
+ import pyarrow.parquet as pq # type: ignore[import-untyped]
+ except ImportError:
+ raise ImportError(_error_msg("pyarrow", "an Arrow Table", extra_dependency))
+
+ if self._arrow_table is None:
+ self._arrow_table = pq.read_table(pa.BufferReader(self))
+
+ return self._arrow_table
diff --git a/foundry_sdk/_version.py b/foundry_sdk/_version.py
index 6506d423..345b12f3 100644
--- a/foundry_sdk/_version.py
+++ b/foundry_sdk/_version.py
@@ -17,4 +17,4 @@
# using the autorelease bot
__version__ = "0.0.0"
-__openapi_document_version__ = "1.1483.0"
+__openapi_document_version__ = "1.1485.0"
diff --git a/foundry_sdk/v1/core/models.py b/foundry_sdk/v1/core/models.py
index 640774db..b041122d 100644
--- a/foundry_sdk/v1/core/models.py
+++ b/foundry_sdk/v1/core/models.py
@@ -158,6 +158,12 @@ class MarkingType(core.ModelBase):
type: typing.Literal["marking"] = "marking"
+class MediaReferenceType(core.ModelBase):
+ """MediaReferenceType"""
+
+ type: typing.Literal["mediaReference"] = "mediaReference"
+
+
MediaType = str
"""
The [media type](https://www.iana.org/assignments/media-types/media-types.xhtml) of the file or attachment.
@@ -272,6 +278,7 @@ class UnsupportedType(core.ModelBase):
"IntegerType",
"LongType",
"MarkingType",
+ "MediaReferenceType",
"MediaType",
"NullType",
"OperationScope",
diff --git a/foundry_sdk/v1/ontologies/models.py b/foundry_sdk/v1/ontologies/models.py
index fd78dd1f..cf221aa5 100644
--- a/foundry_sdk/v1/ontologies/models.py
+++ b/foundry_sdk/v1/ontologies/models.py
@@ -776,6 +776,7 @@ class OntologyArrayType(core.ModelBase):
core_models.CipherTextType,
core_models.MarkingType,
core_models.UnsupportedType,
+ core_models.MediaReferenceType,
"OntologyArrayType",
"OntologyObjectSetType",
core_models.BinaryType,
@@ -1140,6 +1141,7 @@ class QueryArrayType(core.ModelBase):
core_models.BooleanType,
core_models.UnsupportedType,
core_models.AttachmentType,
+ core_models.MediaReferenceType,
core_models.NullType,
"QueryArrayType",
"OntologyObjectSetType",
diff --git a/foundry_sdk/v2/cli.py b/foundry_sdk/v2/cli.py
index b98a5666..0da149f1 100644
--- a/foundry_sdk/v2/cli.py
+++ b/foundry_sdk/v2/cli.py
@@ -5795,6 +5795,12 @@ def language_models_anthropic_model():
""",
)
@click.option("--attribution", type=str, required=False, help="""""")
+@click.option(
+ "--output_config",
+ type=str,
+ required=False,
+ help="""Configuration to control the shape of the model's output""",
+)
@click.option(
"--preview", type=bool, required=False, help="""Enables the use of preview functionality."""
)
@@ -5854,6 +5860,7 @@ def language_models_anthropic_model_op_messages(
max_tokens: int,
messages: str,
attribution: typing.Optional[str],
+ output_config: typing.Optional[str],
preview: typing.Optional[bool],
stop_sequences: typing.Optional[str],
system: typing.Optional[str],
@@ -5870,6 +5877,7 @@ def language_models_anthropic_model_op_messages(
max_tokens=max_tokens,
messages=json.loads(messages),
attribution=attribution,
+ output_config=None if output_config is None else json.loads(output_config),
preview=preview,
stop_sequences=None if stop_sequences is None else json.loads(stop_sequences),
system=None if system is None else json.loads(system),
@@ -6990,6 +6998,285 @@ def models_model_model_version_op_list(
click.echo(repr(result))
+@models_model.group("experiment")
+def models_model_experiment():
+ pass
+
+
+@models_model_experiment.command("get")
+@click.argument("model_rid", type=str, required=True)
+@click.argument("experiment_rid", type=str, required=True)
+@click.option(
+ "--preview", type=bool, required=False, help="""Enables the use of preview functionality."""
+)
+@click.pass_obj
+def models_model_experiment_op_get(
+ client: FoundryClient,
+ model_rid: str,
+ experiment_rid: str,
+ preview: typing.Optional[bool],
+):
+ """
+ Retrieve a single experiment with all metadata, parameters, series metadata, and summary metrics.
+
+ """
+ result = client.models.Model.Experiment.get(
+ model_rid=model_rid,
+ experiment_rid=experiment_rid,
+ preview=preview,
+ )
+ click.echo(repr(result))
+
+
+@models_model_experiment.command("search")
+@click.argument("model_rid", type=str, required=True)
+@click.option(
+ "--order_by",
+ type=str,
+ required=False,
+ help="""The field to sort by. Default is to sort by relevance.""",
+)
+@click.option(
+ "--page_size",
+ type=int,
+ required=False,
+ help="""The maximum number of results to return. Default 50, maximum of 100.""",
+)
+@click.option(
+ "--page_token",
+ type=str,
+ required=False,
+ help="""PageToken to identify the next page to retrieve. Leave empty for the first request.""",
+)
+@click.option(
+ "--preview", type=bool, required=False, help="""Enables the use of preview functionality."""
+)
+@click.option(
+ "--where",
+ type=str,
+ required=False,
+ help="""Optional search filter for filtering experiments. If not provided, all experiments for the model are returned.""",
+)
+@click.pass_obj
+def models_model_experiment_op_search(
+ client: FoundryClient,
+ model_rid: str,
+ order_by: typing.Optional[str],
+ page_size: typing.Optional[int],
+ page_token: typing.Optional[str],
+ preview: typing.Optional[bool],
+ where: typing.Optional[str],
+):
+ """
+ Search experiments using complex nested queries on experiment metadata, parameters, series,
+ and summary metrics. Supports AND/OR/NOT combinations and various predicates.
+ Returns a maximum of 100 results per page.
+
+ """
+ result = client.models.Model.Experiment.search(
+ model_rid=model_rid,
+ order_by=None if order_by is None else json.loads(order_by),
+ page_size=page_size,
+ page_token=page_token,
+ preview=preview,
+ where=None if where is None else json.loads(where),
+ )
+ click.echo(repr(result))
+
+
+@models_model_experiment.group("experiment_artifact_table")
+def models_model_experiment_experiment_artifact_table():
+ pass
+
+
+@models_model_experiment_experiment_artifact_table.command("json")
+@click.argument("model_rid", type=str, required=True)
+@click.argument("experiment_rid", type=str, required=True)
+@click.argument("experiment_artifact_table_name", type=str, required=True)
+@click.option(
+ "--offset",
+ type=int,
+ required=False,
+ help="""Number of rows to skip from the beginning. Defaults to 0.""",
+)
+@click.option(
+ "--page_size",
+ type=int,
+ required=False,
+ help="""Maximum number of rows to return. Default is 10, maximum is 100.""",
+)
+@click.option(
+ "--preview", type=bool, required=False, help="""Enables the use of preview functionality."""
+)
+@click.pass_obj
+def models_model_experiment_experiment_artifact_table_op_json(
+ client: FoundryClient,
+ model_rid: str,
+ experiment_rid: str,
+ experiment_artifact_table_name: str,
+ offset: typing.Optional[int],
+ page_size: typing.Optional[int],
+ preview: typing.Optional[bool],
+):
+ """
+ Read table data from an experiment artifact as a streamed binary response containing JSON.
+ The response body is a JSON array of row objects, where each object maps column names to values.
+ Results are paginated by row count with a default page size of 10 and a maximum of 100.
+
+ """
+ result = client.models.Model.Experiment.ArtifactTable.json(
+ model_rid=model_rid,
+ experiment_rid=experiment_rid,
+ experiment_artifact_table_name=experiment_artifact_table_name,
+ offset=offset,
+ page_size=page_size,
+ preview=preview,
+ )
+ click.echo(result)
+
+
+@models_model_experiment_experiment_artifact_table.command("parquet")
+@click.argument("model_rid", type=str, required=True)
+@click.argument("experiment_rid", type=str, required=True)
+@click.argument("experiment_artifact_table_name", type=str, required=True)
+@click.option(
+ "--preview", type=bool, required=False, help="""Enables the use of preview functionality."""
+)
+@click.pass_obj
+def models_model_experiment_experiment_artifact_table_op_parquet(
+ client: FoundryClient,
+ model_rid: str,
+ experiment_rid: str,
+ experiment_artifact_table_name: str,
+ preview: typing.Optional[bool],
+):
+ """
+ Read raw table data from experiment artifacts in Parquet format.
+
+ """
+ result = client.models.Model.Experiment.ArtifactTable.parquet(
+ model_rid=model_rid,
+ experiment_rid=experiment_rid,
+ experiment_artifact_table_name=experiment_artifact_table_name,
+ preview=preview,
+ )
+ click.echo(result)
+
+
+@models_model_experiment.group("experiment_series")
+def models_model_experiment_experiment_series():
+ pass
+
+
+@models_model_experiment_experiment_series.command("json")
+@click.argument("model_rid", type=str, required=True)
+@click.argument("experiment_rid", type=str, required=True)
+@click.argument("experiment_series_name", type=str, required=True)
+@click.option(
+ "--offset",
+ type=int,
+ required=False,
+ help="""Number of values to skip from the beginning. Defaults to 0.""",
+)
+@click.option(
+ "--page_size",
+ type=int,
+ required=False,
+ help="""Maximum number of values to return per page. Default is 200, maximum is 1000.""",
+)
+@click.option(
+ "--preview", type=bool, required=False, help="""Enables the use of preview functionality."""
+)
+@click.pass_obj
+def models_model_experiment_experiment_series_op_json(
+ client: FoundryClient,
+ model_rid: str,
+ experiment_rid: str,
+ experiment_series_name: str,
+ offset: typing.Optional[int],
+ page_size: typing.Optional[int],
+ preview: typing.Optional[bool],
+):
+ """
+ Retrieve raw time-series data for a single series in JSON format.
+ Results are paginated with a default page size of 200 and a maximum of 1000.
+
+ """
+ result = client.models.Model.Experiment.Series.json(
+ model_rid=model_rid,
+ experiment_rid=experiment_rid,
+ experiment_series_name=experiment_series_name,
+ offset=offset,
+ page_size=page_size,
+ preview=preview,
+ )
+ click.echo(repr(result))
+
+
+@models_model_experiment_experiment_series.command("parquet")
+@click.argument("model_rid", type=str, required=True)
+@click.argument("experiment_rid", type=str, required=True)
+@click.argument("experiment_series_name", type=str, required=True)
+@click.option(
+ "--preview", type=bool, required=False, help="""Enables the use of preview functionality."""
+)
+@click.pass_obj
+def models_model_experiment_experiment_series_op_parquet(
+ client: FoundryClient,
+ model_rid: str,
+ experiment_rid: str,
+ experiment_series_name: str,
+ preview: typing.Optional[bool],
+):
+ """
+ Retrieve raw time-series data for a single series as a streamed binary response in Apache Parquet format.
+
+ """
+ result = client.models.Model.Experiment.Series.parquet(
+ model_rid=model_rid,
+ experiment_rid=experiment_rid,
+ experiment_series_name=experiment_series_name,
+ preview=preview,
+ )
+ click.echo(result)
+
+
+@models.group("live_deployment")
+def models_live_deployment():
+ pass
+
+
+@models_live_deployment.command("transform_json")
+@click.argument("live_deployment_rid", type=str, required=True)
+@click.option(
+ "--input",
+ type=str,
+ required=True,
+ help="""The input data for the model inference. The structure should match the model's transform API specification, where each key is an input name and the value is the corresponding input data.
+""",
+)
+@click.option(
+ "--preview", type=bool, required=False, help="""Enables the use of preview functionality."""
+)
+@click.pass_obj
+def models_live_deployment_op_transform_json(
+ client: FoundryClient,
+ live_deployment_rid: str,
+ input: str,
+ preview: typing.Optional[bool],
+):
+ """
+ Performs inference on the live deployment.
+
+ """
+ result = client.models.LiveDeployment.transform_json(
+ live_deployment_rid=live_deployment_rid,
+ input=json.loads(input),
+ preview=preview,
+ )
+ click.echo(repr(result))
+
+
@cli.group("ontologies")
def ontologies():
pass
diff --git a/foundry_sdk/v2/datasets/dataset.py b/foundry_sdk/v2/datasets/dataset.py
index 0a243472..9d454c47 100644
--- a/foundry_sdk/v2/datasets/dataset.py
+++ b/foundry_sdk/v2/datasets/dataset.py
@@ -740,7 +740,7 @@ def read_table(
"ReadTableTimeout": datasets_errors.ReadTableTimeout,
"SchemaNotFound": datasets_errors.SchemaNotFound,
},
- response_mode=_sdk_internal.get("response_mode", "TABLE"),
+ response_mode=_sdk_internal.get("response_mode", "ARROW_TABLE"),
),
)
@@ -1575,7 +1575,7 @@ def read_table(
"ReadTableTimeout": datasets_errors.ReadTableTimeout,
"SchemaNotFound": datasets_errors.SchemaNotFound,
},
- response_mode=_sdk_internal.get("response_mode", "TABLE"),
+ response_mode=_sdk_internal.get("response_mode", "ARROW_TABLE"),
),
)
diff --git a/foundry_sdk/v2/functions/models.py b/foundry_sdk/v2/functions/models.py
index a3b4cc89..abe69e6b 100644
--- a/foundry_sdk/v2/functions/models.py
+++ b/foundry_sdk/v2/functions/models.py
@@ -236,6 +236,7 @@ class QueryArrayType(core.ModelBase):
core_models.BooleanType,
core_models.UnsupportedType,
core_models.AttachmentType,
+ core_models.MediaReferenceType,
core_models.NullType,
"QueryArrayType",
"TwoDimensionalAggregation",
diff --git a/foundry_sdk/v2/language_models/anthropic_model.py b/foundry_sdk/v2/language_models/anthropic_model.py
index 2d69600e..584b46f8 100644
--- a/foundry_sdk/v2/language_models/anthropic_model.py
+++ b/foundry_sdk/v2/language_models/anthropic_model.py
@@ -58,6 +58,7 @@ def messages(
max_tokens: int,
messages: typing.List[language_models_models.AnthropicMessage],
attribution: typing.Optional[core_models.Attribution] = None,
+ output_config: typing.Optional[language_models_models.AnthropicOutputConfig] = None,
preview: typing.Optional[core_models.PreviewMode] = None,
stop_sequences: typing.Optional[typing.List[str]] = None,
system: typing.Optional[typing.List[language_models_models.AnthropicSystemMessage]] = None,
@@ -80,6 +81,8 @@ def messages(
:type messages: List[AnthropicMessage]
:param attribution:
:type attribution: Optional[Attribution]
+ :param output_config: Configuration to control the shape of the model's output
+ :type output_config: Optional[AnthropicOutputConfig]
:param preview: Enables the use of preview functionality.
:type preview: Optional[PreviewMode]
:param stop_sequences: Custom text sequences that will cause the model to stop generating.
@@ -134,6 +137,7 @@ def messages(
tools=tools,
top_k=top_k,
top_p=top_p,
+ output_config=output_config,
),
response_type=language_models_models.AnthropicMessagesResponse,
request_timeout=request_timeout,
@@ -194,6 +198,7 @@ def messages(
max_tokens: int,
messages: typing.List[language_models_models.AnthropicMessage],
attribution: typing.Optional[core_models.Attribution] = None,
+ output_config: typing.Optional[language_models_models.AnthropicOutputConfig] = None,
preview: typing.Optional[core_models.PreviewMode] = None,
stop_sequences: typing.Optional[typing.List[str]] = None,
system: typing.Optional[typing.List[language_models_models.AnthropicSystemMessage]] = None,
@@ -216,6 +221,8 @@ def messages(
:type messages: List[AnthropicMessage]
:param attribution:
:type attribution: Optional[Attribution]
+ :param output_config: Configuration to control the shape of the model's output
+ :type output_config: Optional[AnthropicOutputConfig]
:param preview: Enables the use of preview functionality.
:type preview: Optional[PreviewMode]
:param stop_sequences: Custom text sequences that will cause the model to stop generating.
@@ -270,6 +277,7 @@ def messages(
tools=tools,
top_k=top_k,
top_p=top_p,
+ output_config=output_config,
),
response_type=language_models_models.AnthropicMessagesResponse,
request_timeout=request_timeout,
diff --git a/foundry_sdk/v2/language_models/models.py b/foundry_sdk/v2/language_models/models.py
index 1db1dd20..00c5d2c1 100644
--- a/foundry_sdk/v2/language_models/models.py
+++ b/foundry_sdk/v2/language_models/models.py
@@ -145,6 +145,16 @@ class AnthropicDocumentCitations(core.ModelBase):
"""AnthropicDocumentSource"""
+AnthropicEffort = typing.Literal["LOW", "MEDIUM", "HIGH", "MAX"]
+"""
+https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#effort
+
+Controls how many tokens Claude uses when responding.
+Supported by Claude models beginning with Opus 4.5.
+Setting effort to HIGH produces the same behavior as omitting the parameter entirely.
+"""
+
+
class AnthropicEnabledThinking(core.ModelBase):
"""AnthropicEnabledThinking"""
@@ -178,6 +188,13 @@ class AnthropicImageBase64Source(core.ModelBase):
type: typing.Literal["base64"] = "base64"
+class AnthropicJsonSchemaOutputFormat(core.ModelBase):
+ """AnthropicJsonSchemaOutputFormat"""
+
+ schema_: JsonSchema = pydantic.Field(alias=str("schema")) # type: ignore[literal-required]
+ type: typing.Literal["jsonSchema"] = "jsonSchema"
+
+
AnthropicMediaType = typing.Literal["IMAGE_JPEG", "IMAGE_PNG", "IMAGE_GIF", "IMAGE_WEBP"]
"""AnthropicMediaType"""
@@ -250,6 +267,9 @@ class AnthropicMessagesRequest(core.ModelBase):
top_p: typing.Optional[float] = pydantic.Field(alias=str("topP"), default=None) # type: ignore[literal-required]
"""Use nucleus sampling. You should either alter temperature or top_p, but not both"""
+ output_config: typing.Optional[AnthropicOutputConfig] = pydantic.Field(alias=str("outputConfig"), default=None) # type: ignore[literal-required]
+ """Configuration to control the shape of the model's output"""
+
class AnthropicMessagesResponse(core.ModelBase):
"""AnthropicMessagesResponse"""
@@ -269,6 +289,13 @@ class AnthropicNoneToolChoice(core.ModelBase):
type: typing.Literal["none"] = "none"
+class AnthropicOutputConfig(core.ModelBase):
+ """AnthropicOutputConfig"""
+
+ format: typing.Optional[AnthropicOutputFormat] = None
+ effort: typing.Optional[AnthropicEffort] = None
+
+
class AnthropicRedactedThinking(core.ModelBase):
"""AnthropicRedactedThinking"""
@@ -423,6 +450,10 @@ class OpenAiEmbeddingsResponse(core.ModelBase):
"""AnthropicImageSource"""
+AnthropicOutputFormat = AnthropicJsonSchemaOutputFormat
+"""AnthropicOutputFormat"""
+
+
AnthropicSystemMessage = AnthropicText
"""AnthropicSystemMessage"""
@@ -461,11 +492,13 @@ class OpenAiEmbeddingsResponse(core.ModelBase):
"AnthropicDocument",
"AnthropicDocumentCitations",
"AnthropicDocumentSource",
+ "AnthropicEffort",
"AnthropicEnabledThinking",
"AnthropicEphemeralCacheControl",
"AnthropicImage",
"AnthropicImageBase64Source",
"AnthropicImageSource",
+ "AnthropicJsonSchemaOutputFormat",
"AnthropicMediaType",
"AnthropicMessage",
"AnthropicMessageContent",
@@ -473,6 +506,8 @@ class OpenAiEmbeddingsResponse(core.ModelBase):
"AnthropicMessagesRequest",
"AnthropicMessagesResponse",
"AnthropicNoneToolChoice",
+ "AnthropicOutputConfig",
+ "AnthropicOutputFormat",
"AnthropicRedactedThinking",
"AnthropicSystemMessage",
"AnthropicText",
diff --git a/foundry_sdk/v2/models/_client.py b/foundry_sdk/v2/models/_client.py
index cb9f2762..2ef844ea 100644
--- a/foundry_sdk/v2/models/_client.py
+++ b/foundry_sdk/v2/models/_client.py
@@ -38,6 +38,16 @@ def __init__(
self._hostname = hostname
self._config = config
+ @cached_property
+ def LiveDeployment(self):
+ from foundry_sdk.v2.models.live_deployment import LiveDeploymentClient
+
+ return LiveDeploymentClient(
+ auth=self._auth,
+ hostname=self._hostname,
+ config=self._config,
+ )
+
@cached_property
def Model(self):
from foundry_sdk.v2.models.model import ModelClient
@@ -84,10 +94,13 @@ def __init__(
hostname: str,
config: typing.Optional[core.Config] = None,
):
+ from foundry_sdk.v2.models.live_deployment import AsyncLiveDeploymentClient
from foundry_sdk.v2.models.model import AsyncModelClient
from foundry_sdk.v2.models.model_studio import AsyncModelStudioClient
from foundry_sdk.v2.models.model_studio_trainer import AsyncModelStudioTrainerClient # NOQA
+ self.LiveDeployment = AsyncLiveDeploymentClient(auth=auth, hostname=hostname, config=config)
+
self.Model = AsyncModelClient(auth=auth, hostname=hostname, config=config)
self.ModelStudio = AsyncModelStudioClient(auth=auth, hostname=hostname, config=config)
diff --git a/foundry_sdk/v2/models/errors.py b/foundry_sdk/v2/models/errors.py
index 566f9cff..f88596c0 100644
--- a/foundry_sdk/v2/models/errors.py
+++ b/foundry_sdk/v2/models/errors.py
@@ -18,6 +18,7 @@
import typing_extensions
+from foundry_sdk import _core as core
from foundry_sdk import _errors as errors
from foundry_sdk.v2.models import models as models_models
@@ -110,6 +111,113 @@ class CreateModelVersionPermissionDenied(errors.PermissionDeniedError):
error_instance_id: str
+class ExperimentArtifactNotFoundParameters(typing_extensions.TypedDict):
+ """The requested artifact was not found in the experiment."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ modelRid: core.RID
+ experimentRid: core.RID
+ artifactName: str
+
+
+@dataclass
+class ExperimentArtifactNotFound(errors.NotFoundError):
+ name: typing.Literal["ExperimentArtifactNotFound"]
+ parameters: ExperimentArtifactNotFoundParameters
+ error_instance_id: str
+
+
+class ExperimentNotFoundParameters(typing_extensions.TypedDict):
+ """The given Experiment could not be found."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ experimentRid: models_models.ExperimentRid
+ modelRid: models_models.ModelRid
+
+
+@dataclass
+class ExperimentNotFound(errors.NotFoundError):
+ name: typing.Literal["ExperimentNotFound"]
+ parameters: ExperimentNotFoundParameters
+ error_instance_id: str
+
+
+class ExperimentSeriesNotFoundParameters(typing_extensions.TypedDict):
+ """The requested series was not found in the experiment."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ modelRid: core.RID
+ experimentRid: core.RID
+ seriesName: str
+
+
+@dataclass
+class ExperimentSeriesNotFound(errors.NotFoundError):
+ name: typing.Literal["ExperimentSeriesNotFound"]
+ parameters: ExperimentSeriesNotFoundParameters
+ error_instance_id: str
+
+
+class InferenceFailureParameters(typing_extensions.TypedDict):
+ """
+ The inference request failed due to a model execution error or unexpected internal issue.
+ This typically indicates a problem with the model itself rather than the input data.
+ """
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ liveDeploymentRid: models_models.LiveDeploymentRid
+ errorMessage: str
+
+
+@dataclass
+class InferenceFailure(errors.BadRequestError):
+ name: typing.Literal["InferenceFailure"]
+ parameters: InferenceFailureParameters
+ error_instance_id: str
+
+
+class InferenceInvalidInputParameters(typing_extensions.TypedDict):
+ """
+ The inference request contains invalid input data that does not match the model's API specification.
+ Check the error type for specific validation failure details.
+ """
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ liveDeploymentRid: models_models.LiveDeploymentRid
+ errorType: models_models.InferenceInputErrorType
+ """The specific type and details of the input validation error"""
+
+
+@dataclass
+class InferenceInvalidInput(errors.BadRequestError):
+ name: typing.Literal["InferenceInvalidInput"]
+ parameters: InferenceInvalidInputParameters
+ error_instance_id: str
+
+
+class InferenceTimeoutParameters(typing_extensions.TypedDict):
+ """
+ The live deployment took longer than 5 minutes to respond to the inference request.
+ This typically indicates the model execution is taking too long or the deployment is under heavy load.
+ """
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ liveDeploymentRid: models_models.LiveDeploymentRid
+
+
+@dataclass
+class InferenceTimeout(errors.InternalServerError):
+ name: typing.Literal["InferenceTimeout"]
+ parameters: InferenceTimeoutParameters
+ error_instance_id: str
+
+
class InvalidModelApiParameters(typing_extensions.TypedDict):
"""The model api failed validations"""
@@ -139,6 +247,40 @@ class InvalidModelStudioCreateRequest(errors.BadRequestError):
error_instance_id: str
+class JsonExperimentArtifactTablePermissionDeniedParameters(typing_extensions.TypedDict):
+ """Could not json the ExperimentArtifactTable."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ experimentRid: models_models.ExperimentRid
+ experimentArtifactTableName: models_models.ExperimentArtifactName
+ modelRid: models_models.ModelRid
+
+
+@dataclass
+class JsonExperimentArtifactTablePermissionDenied(errors.PermissionDeniedError):
+ name: typing.Literal["JsonExperimentArtifactTablePermissionDenied"]
+ parameters: JsonExperimentArtifactTablePermissionDeniedParameters
+ error_instance_id: str
+
+
+class JsonExperimentSeriesPermissionDeniedParameters(typing_extensions.TypedDict):
+ """Could not json the ExperimentSeries."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ experimentSeriesName: models_models.SeriesName
+ experimentRid: models_models.ExperimentRid
+ modelRid: models_models.ModelRid
+
+
+@dataclass
+class JsonExperimentSeriesPermissionDenied(errors.PermissionDeniedError):
+ name: typing.Literal["JsonExperimentSeriesPermissionDenied"]
+ parameters: JsonExperimentSeriesPermissionDeniedParameters
+ error_instance_id: str
+
+
class LatestModelStudioConfigVersionsPermissionDeniedParameters(typing_extensions.TypedDict):
"""Could not latest the ModelStudioConfigVersion."""
@@ -169,6 +311,37 @@ class LaunchModelStudioPermissionDenied(errors.PermissionDeniedError):
error_instance_id: str
+class LiveDeploymentNotFoundParameters(typing_extensions.TypedDict):
+ """The specified live deployment was not found."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ liveDeploymentRid: models_models.LiveDeploymentRid
+
+
+@dataclass
+class LiveDeploymentNotFound(errors.NotFoundError):
+ name: typing.Literal["LiveDeploymentNotFound"]
+ parameters: LiveDeploymentNotFoundParameters
+ error_instance_id: str
+
+
+class ModelExperimentNotFoundParameters(typing_extensions.TypedDict):
+ """The requested experiment was not found or the user lacks permission to access it."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ modelRid: core.RID
+ experimentRid: core.RID
+
+
+@dataclass
+class ModelExperimentNotFound(errors.NotFoundError):
+ name: typing.Literal["ModelExperimentNotFound"]
+ parameters: ModelExperimentNotFoundParameters
+ error_instance_id: str
+
+
class ModelNotFoundParameters(typing_extensions.TypedDict):
"""The given Model could not be found."""
@@ -246,6 +419,55 @@ class ModelVersionNotFound(errors.NotFoundError):
error_instance_id: str
+class ParquetExperimentArtifactTablePermissionDeniedParameters(typing_extensions.TypedDict):
+ """Could not parquet the ExperimentArtifactTable."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ experimentRid: models_models.ExperimentRid
+ experimentArtifactTableName: models_models.ExperimentArtifactName
+ modelRid: models_models.ModelRid
+
+
+@dataclass
+class ParquetExperimentArtifactTablePermissionDenied(errors.PermissionDeniedError):
+ name: typing.Literal["ParquetExperimentArtifactTablePermissionDenied"]
+ parameters: ParquetExperimentArtifactTablePermissionDeniedParameters
+ error_instance_id: str
+
+
+class ParquetExperimentSeriesPermissionDeniedParameters(typing_extensions.TypedDict):
+ """Could not parquet the ExperimentSeries."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ experimentSeriesName: models_models.SeriesName
+ experimentRid: models_models.ExperimentRid
+ modelRid: models_models.ModelRid
+
+
+@dataclass
+class ParquetExperimentSeriesPermissionDenied(errors.PermissionDeniedError):
+ name: typing.Literal["ParquetExperimentSeriesPermissionDenied"]
+ parameters: ParquetExperimentSeriesPermissionDeniedParameters
+ error_instance_id: str
+
+
+class SearchExperimentsPermissionDeniedParameters(typing_extensions.TypedDict):
+ """Could not search the Experiment."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ modelRid: models_models.ModelRid
+
+
+@dataclass
+class SearchExperimentsPermissionDenied(errors.PermissionDeniedError):
+ name: typing.Literal["SearchExperimentsPermissionDenied"]
+ parameters: SearchExperimentsPermissionDeniedParameters
+ error_instance_id: str
+
+
class TrainerNotFoundParameters(typing_extensions.TypedDict):
"""The specified trainer does not exist."""
@@ -261,6 +483,21 @@ class TrainerNotFound(errors.NotFoundError):
error_instance_id: str
+class TransformJsonLiveDeploymentPermissionDeniedParameters(typing_extensions.TypedDict):
+ """Could not transformJson the LiveDeployment."""
+
+ __pydantic_config__ = {"extra": "allow"} # type: ignore
+
+ liveDeploymentRid: models_models.LiveDeploymentRid
+
+
+@dataclass
+class TransformJsonLiveDeploymentPermissionDenied(errors.PermissionDeniedError):
+ name: typing.Literal["TransformJsonLiveDeploymentPermissionDenied"]
+ parameters: TransformJsonLiveDeploymentPermissionDeniedParameters
+ error_instance_id: str
+
+
__all__ = [
"CondaSolveFailureForProvidedPackages",
"CreateConfigValidationError",
@@ -268,14 +505,28 @@ class TrainerNotFound(errors.NotFoundError):
"CreateModelStudioConfigVersionPermissionDenied",
"CreateModelStudioPermissionDenied",
"CreateModelVersionPermissionDenied",
+ "ExperimentArtifactNotFound",
+ "ExperimentNotFound",
+ "ExperimentSeriesNotFound",
+ "InferenceFailure",
+ "InferenceInvalidInput",
+ "InferenceTimeout",
"InvalidModelApi",
"InvalidModelStudioCreateRequest",
+ "JsonExperimentArtifactTablePermissionDenied",
+ "JsonExperimentSeriesPermissionDenied",
"LatestModelStudioConfigVersionsPermissionDenied",
"LaunchModelStudioPermissionDenied",
+ "LiveDeploymentNotFound",
+ "ModelExperimentNotFound",
"ModelNotFound",
"ModelStudioConfigVersionNotFound",
"ModelStudioNotFound",
"ModelStudioTrainerNotFound",
"ModelVersionNotFound",
+ "ParquetExperimentArtifactTablePermissionDenied",
+ "ParquetExperimentSeriesPermissionDenied",
+ "SearchExperimentsPermissionDenied",
"TrainerNotFound",
+ "TransformJsonLiveDeploymentPermissionDenied",
]
diff --git a/foundry_sdk/v2/models/experiment.py b/foundry_sdk/v2/models/experiment.py
new file mode 100644
index 00000000..924ce3f5
--- /dev/null
+++ b/foundry_sdk/v2/models/experiment.py
@@ -0,0 +1,398 @@
+# Copyright 2024 Palantir Technologies, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import typing
+from functools import cached_property
+
+import pydantic
+import typing_extensions
+
+from foundry_sdk import _core as core
+from foundry_sdk import _errors as errors
+from foundry_sdk.v2.core import models as core_models
+from foundry_sdk.v2.models import errors as models_errors
+from foundry_sdk.v2.models import models as models_models
+
+
+class ExperimentClient:
+ """
+ The API client for the Experiment Resource.
+
+ :param auth: Your auth configuration.
+ :param hostname: Your Foundry hostname (for example, "myfoundry.palantirfoundry.com"). This can also include your API gateway service URI.
+ :param config: Optionally specify the configuration for the HTTP session.
+ """
+
+ def __init__(
+ self,
+ auth: core.Auth,
+ hostname: str,
+ config: typing.Optional[core.Config] = None,
+ ):
+ self._auth = auth
+ self._hostname = hostname
+ self._config = config
+ self._api_client = core.ApiClient(auth=auth, hostname=hostname, config=config)
+
+ self.with_streaming_response = _ExperimentClientStreaming(self)
+ self.with_raw_response = _ExperimentClientRaw(self)
+
+ @cached_property
+ def Series(self):
+ from foundry_sdk.v2.models.experiment_series import ExperimentSeriesClient
+
+ return ExperimentSeriesClient(
+ auth=self._auth,
+ hostname=self._hostname,
+ config=self._config,
+ )
+
+ @cached_property
+ def ArtifactTable(self):
+ from foundry_sdk.v2.models.experiment_artifact_table import (
+ ExperimentArtifactTableClient,
+ ) # NOQA
+
+ return ExperimentArtifactTableClient(
+ auth=self._auth,
+ hostname=self._hostname,
+ config=self._config,
+ )
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def get(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ *,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> models_models.Experiment:
+ """
+ Retrieve a single experiment with all metadata, parameters, series metadata, and summary metrics.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: models_models.Experiment
+
+ :raises ExperimentNotFound: The given Experiment could not be found.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ },
+ header_params={
+ "Accept": "application/json",
+ },
+ body=None,
+ response_type=models_models.Experiment,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "ExperimentNotFound": models_errors.ExperimentNotFound,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def search(
+ self,
+ model_rid: models_models.ModelRid,
+ *,
+ order_by: typing.Optional[models_models.SearchExperimentsOrderBy] = None,
+ page_size: typing.Optional[core_models.PageSize] = None,
+ page_token: typing.Optional[core_models.PageToken] = None,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ where: typing.Optional[models_models.SearchExperimentsFilter] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> models_models.SearchExperimentsResponse:
+ """
+ Search experiments using complex nested queries on experiment metadata, parameters, series,
+ and summary metrics. Supports AND/OR/NOT combinations and various predicates.
+ Returns a maximum of 100 results per page.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param order_by: The field to sort by. Default is to sort by relevance.
+ :type order_by: Optional[SearchExperimentsOrderBy]
+ :param page_size: The maximum number of results to return. Default 50, maximum of 100.
+ :type page_size: Optional[PageSize]
+ :param page_token: PageToken to identify the next page to retrieve. Leave empty for the first request.
+ :type page_token: Optional[PageToken]
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param where: Optional search filter for filtering experiments. If not provided, all experiments for the model are returned.
+ :type where: Optional[SearchExperimentsFilter]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: models_models.SearchExperimentsResponse
+
+ :raises SearchExperimentsPermissionDenied: Could not search the Experiment.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="POST",
+ resource_path="/v2/models/{modelRid}/experiments/search",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ },
+ header_params={
+ "Content-Type": "application/json",
+ "Accept": "application/json",
+ },
+ body=models_models.SearchExperimentsRequest(
+ where=where,
+ order_by=order_by,
+ page_size=page_size,
+ page_token=page_token,
+ ),
+ response_type=models_models.SearchExperimentsResponse,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "SearchExperimentsPermissionDenied": models_errors.SearchExperimentsPermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+
+class _ExperimentClientRaw:
+ def __init__(self, client: ExperimentClient) -> None:
+ def get(_: models_models.Experiment): ...
+ def search(_: models_models.SearchExperimentsResponse): ...
+
+ self.get = core.with_raw_response(get, client.get)
+ self.search = core.with_raw_response(search, client.search)
+
+
+class _ExperimentClientStreaming:
+ def __init__(self, client: ExperimentClient) -> None:
+ def get(_: models_models.Experiment): ...
+ def search(_: models_models.SearchExperimentsResponse): ...
+
+ self.get = core.with_streaming_response(get, client.get)
+ self.search = core.with_streaming_response(search, client.search)
+
+
+class AsyncExperimentClient:
+ """
+ The API client for the Experiment Resource.
+
+ :param auth: Your auth configuration.
+ :param hostname: Your Foundry hostname (for example, "myfoundry.palantirfoundry.com"). This can also include your API gateway service URI.
+ :param config: Optionally specify the configuration for the HTTP session.
+ """
+
+ def __init__(
+ self,
+ auth: core.Auth,
+ hostname: str,
+ config: typing.Optional[core.Config] = None,
+ ):
+ self._auth = auth
+ self._hostname = hostname
+ self._config = config
+ self._api_client = core.AsyncApiClient(auth=auth, hostname=hostname, config=config)
+
+ self.with_streaming_response = _AsyncExperimentClientStreaming(self)
+ self.with_raw_response = _AsyncExperimentClientRaw(self)
+
+ @cached_property
+ def Series(self):
+ from foundry_sdk.v2.models.experiment_series import AsyncExperimentSeriesClient
+
+ return AsyncExperimentSeriesClient(
+ auth=self._auth,
+ hostname=self._hostname,
+ config=self._config,
+ )
+
+ @cached_property
+ def ArtifactTable(self):
+ from foundry_sdk.v2.models.experiment_artifact_table import (
+ AsyncExperimentArtifactTableClient,
+ ) # NOQA
+
+ return AsyncExperimentArtifactTableClient(
+ auth=self._auth,
+ hostname=self._hostname,
+ config=self._config,
+ )
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def get(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ *,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> typing.Awaitable[models_models.Experiment]:
+ """
+ Retrieve a single experiment with all metadata, parameters, series metadata, and summary metrics.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: typing.Awaitable[models_models.Experiment]
+
+ :raises ExperimentNotFound: The given Experiment could not be found.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ },
+ header_params={
+ "Accept": "application/json",
+ },
+ body=None,
+ response_type=models_models.Experiment,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "ExperimentNotFound": models_errors.ExperimentNotFound,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def search(
+ self,
+ model_rid: models_models.ModelRid,
+ *,
+ order_by: typing.Optional[models_models.SearchExperimentsOrderBy] = None,
+ page_size: typing.Optional[core_models.PageSize] = None,
+ page_token: typing.Optional[core_models.PageToken] = None,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ where: typing.Optional[models_models.SearchExperimentsFilter] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> typing.Awaitable[models_models.SearchExperimentsResponse]:
+ """
+ Search experiments using complex nested queries on experiment metadata, parameters, series,
+ and summary metrics. Supports AND/OR/NOT combinations and various predicates.
+ Returns a maximum of 100 results per page.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param order_by: The field to sort by. Default is to sort by relevance.
+ :type order_by: Optional[SearchExperimentsOrderBy]
+ :param page_size: The maximum number of results to return. Default 50, maximum of 100.
+ :type page_size: Optional[PageSize]
+ :param page_token: PageToken to identify the next page to retrieve. Leave empty for the first request.
+ :type page_token: Optional[PageToken]
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param where: Optional search filter for filtering experiments. If not provided, all experiments for the model are returned.
+ :type where: Optional[SearchExperimentsFilter]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: typing.Awaitable[models_models.SearchExperimentsResponse]
+
+ :raises SearchExperimentsPermissionDenied: Could not search the Experiment.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="POST",
+ resource_path="/v2/models/{modelRid}/experiments/search",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ },
+ header_params={
+ "Content-Type": "application/json",
+ "Accept": "application/json",
+ },
+ body=models_models.SearchExperimentsRequest(
+ where=where,
+ order_by=order_by,
+ page_size=page_size,
+ page_token=page_token,
+ ),
+ response_type=models_models.SearchExperimentsResponse,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "SearchExperimentsPermissionDenied": models_errors.SearchExperimentsPermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+
+class _AsyncExperimentClientRaw:
+ def __init__(self, client: AsyncExperimentClient) -> None:
+ def get(_: models_models.Experiment): ...
+ def search(_: models_models.SearchExperimentsResponse): ...
+
+ self.get = core.async_with_raw_response(get, client.get)
+ self.search = core.async_with_raw_response(search, client.search)
+
+
+class _AsyncExperimentClientStreaming:
+ def __init__(self, client: AsyncExperimentClient) -> None:
+ def get(_: models_models.Experiment): ...
+ def search(_: models_models.SearchExperimentsResponse): ...
+
+ self.get = core.async_with_streaming_response(get, client.get)
+ self.search = core.async_with_streaming_response(search, client.search)
diff --git a/foundry_sdk/v2/models/experiment_artifact_table.py b/foundry_sdk/v2/models/experiment_artifact_table.py
new file mode 100644
index 00000000..fa0611b2
--- /dev/null
+++ b/foundry_sdk/v2/models/experiment_artifact_table.py
@@ -0,0 +1,359 @@
+# Copyright 2024 Palantir Technologies, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import typing
+
+import pydantic
+import typing_extensions
+
+from foundry_sdk import _core as core
+from foundry_sdk import _errors as errors
+from foundry_sdk.v2.core import models as core_models
+from foundry_sdk.v2.models import errors as models_errors
+from foundry_sdk.v2.models import models as models_models
+
+
+class ExperimentArtifactTableClient:
+ """
+ The API client for the ExperimentArtifactTable Resource.
+
+ :param auth: Your auth configuration.
+ :param hostname: Your Foundry hostname (for example, "myfoundry.palantirfoundry.com"). This can also include your API gateway service URI.
+ :param config: Optionally specify the configuration for the HTTP session.
+ """
+
+ def __init__(
+ self,
+ auth: core.Auth,
+ hostname: str,
+ config: typing.Optional[core.Config] = None,
+ ):
+ self._auth = auth
+ self._hostname = hostname
+ self._config = config
+ self._api_client = core.ApiClient(auth=auth, hostname=hostname, config=config)
+
+ self.with_streaming_response = _ExperimentArtifactTableClientStreaming(self)
+ self.with_raw_response = _ExperimentArtifactTableClientRaw(self)
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def json(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ experiment_artifact_table_name: models_models.ExperimentArtifactName,
+ *,
+ offset: typing.Optional[int] = None,
+ page_size: typing.Optional[core_models.PageSize] = None,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> bytes:
+ """
+ Read table data from an experiment artifact as a streamed binary response containing JSON.
+ The response body is a JSON array of row objects, where each object maps column names to values.
+ Results are paginated by row count with a default page size of 10 and a maximum of 100.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param experiment_artifact_table_name:
+ :type experiment_artifact_table_name: ExperimentArtifactName
+ :param offset: Number of rows to skip from the beginning. Defaults to 0.
+ :type offset: Optional[int]
+ :param page_size: Maximum number of rows to return. Default is 10, maximum is 100.
+ :type page_size: Optional[PageSize]
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: bytes
+
+ :raises JsonExperimentArtifactTablePermissionDenied: Could not json the ExperimentArtifactTable.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}/artifactTables/{experimentArtifactTableName}/json",
+ query_params={
+ "offset": offset,
+ "pageSize": page_size,
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ "experimentArtifactTableName": experiment_artifact_table_name,
+ },
+ header_params={
+ "Accept": "application/octet-stream",
+ },
+ body=None,
+ response_type=bytes,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "JsonExperimentArtifactTablePermissionDenied": models_errors.JsonExperimentArtifactTablePermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def parquet(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ experiment_artifact_table_name: models_models.ExperimentArtifactName,
+ *,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> core.TableResponse:
+ """
+ Read raw table data from experiment artifacts in Parquet format.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param experiment_artifact_table_name:
+ :type experiment_artifact_table_name: ExperimentArtifactName
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: core.TableResponse
+
+
+ :raises ParquetExperimentArtifactTablePermissionDenied: Could not parquet the ExperimentArtifactTable.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}/artifactTables/{experimentArtifactTableName}/parquet",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ "experimentArtifactTableName": experiment_artifact_table_name,
+ },
+ header_params={
+ "Accept": "application/octet-stream",
+ },
+ body=None,
+ response_type=bytes,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "ParquetExperimentArtifactTablePermissionDenied": models_errors.ParquetExperimentArtifactTablePermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode", "PARQUET_TABLE"),
+ ),
+ )
+
+
+class _ExperimentArtifactTableClientRaw:
+ def __init__(self, client: ExperimentArtifactTableClient) -> None:
+ def json(_: bytes): ...
+ def parquet(_: bytes): ...
+
+ self.json = core.with_raw_response(json, client.json)
+ self.parquet = core.with_raw_response(parquet, client.parquet)
+
+
+class _ExperimentArtifactTableClientStreaming:
+ def __init__(self, client: ExperimentArtifactTableClient) -> None:
+ def json(_: bytes): ...
+ def parquet(_: bytes): ...
+
+ self.json = core.with_streaming_response(json, client.json)
+ self.parquet = core.with_streaming_response(parquet, client.parquet)
+
+
+class AsyncExperimentArtifactTableClient:
+ """
+ The API client for the ExperimentArtifactTable Resource.
+
+ :param auth: Your auth configuration.
+ :param hostname: Your Foundry hostname (for example, "myfoundry.palantirfoundry.com"). This can also include your API gateway service URI.
+ :param config: Optionally specify the configuration for the HTTP session.
+ """
+
+ def __init__(
+ self,
+ auth: core.Auth,
+ hostname: str,
+ config: typing.Optional[core.Config] = None,
+ ):
+ self._auth = auth
+ self._hostname = hostname
+ self._config = config
+ self._api_client = core.AsyncApiClient(auth=auth, hostname=hostname, config=config)
+
+ self.with_streaming_response = _AsyncExperimentArtifactTableClientStreaming(self)
+ self.with_raw_response = _AsyncExperimentArtifactTableClientRaw(self)
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def json(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ experiment_artifact_table_name: models_models.ExperimentArtifactName,
+ *,
+ offset: typing.Optional[int] = None,
+ page_size: typing.Optional[core_models.PageSize] = None,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> typing.Awaitable[bytes]:
+ """
+ Read table data from an experiment artifact as a streamed binary response containing JSON.
+ The response body is a JSON array of row objects, where each object maps column names to values.
+ Results are paginated by row count with a default page size of 10 and a maximum of 100.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param experiment_artifact_table_name:
+ :type experiment_artifact_table_name: ExperimentArtifactName
+ :param offset: Number of rows to skip from the beginning. Defaults to 0.
+ :type offset: Optional[int]
+ :param page_size: Maximum number of rows to return. Default is 10, maximum is 100.
+ :type page_size: Optional[PageSize]
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: typing.Awaitable[bytes]
+
+ :raises JsonExperimentArtifactTablePermissionDenied: Could not json the ExperimentArtifactTable.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}/artifactTables/{experimentArtifactTableName}/json",
+ query_params={
+ "offset": offset,
+ "pageSize": page_size,
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ "experimentArtifactTableName": experiment_artifact_table_name,
+ },
+ header_params={
+ "Accept": "application/octet-stream",
+ },
+ body=None,
+ response_type=bytes,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "JsonExperimentArtifactTablePermissionDenied": models_errors.JsonExperimentArtifactTablePermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def parquet(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ experiment_artifact_table_name: models_models.ExperimentArtifactName,
+ *,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> typing.Awaitable[core.TableResponse]:
+ """
+ Read raw table data from experiment artifacts in Parquet format.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param experiment_artifact_table_name:
+ :type experiment_artifact_table_name: ExperimentArtifactName
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: typing.Awaitable[core.TableResponse]
+
+
+ :raises ParquetExperimentArtifactTablePermissionDenied: Could not parquet the ExperimentArtifactTable.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}/artifactTables/{experimentArtifactTableName}/parquet",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ "experimentArtifactTableName": experiment_artifact_table_name,
+ },
+ header_params={
+ "Accept": "application/octet-stream",
+ },
+ body=None,
+ response_type=bytes,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "ParquetExperimentArtifactTablePermissionDenied": models_errors.ParquetExperimentArtifactTablePermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode", "PARQUET_TABLE"),
+ ),
+ )
+
+
+class _AsyncExperimentArtifactTableClientRaw:
+ def __init__(self, client: AsyncExperimentArtifactTableClient) -> None:
+ def json(_: bytes): ...
+ def parquet(_: bytes): ...
+
+ self.json = core.async_with_raw_response(json, client.json)
+ self.parquet = core.async_with_raw_response(parquet, client.parquet)
+
+
+class _AsyncExperimentArtifactTableClientStreaming:
+ def __init__(self, client: AsyncExperimentArtifactTableClient) -> None:
+ def json(_: bytes): ...
+ def parquet(_: bytes): ...
+
+ self.json = core.async_with_streaming_response(json, client.json)
+ self.parquet = core.async_with_streaming_response(parquet, client.parquet)
diff --git a/foundry_sdk/v2/models/experiment_series.py b/foundry_sdk/v2/models/experiment_series.py
new file mode 100644
index 00000000..98e54398
--- /dev/null
+++ b/foundry_sdk/v2/models/experiment_series.py
@@ -0,0 +1,357 @@
+# Copyright 2024 Palantir Technologies, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import typing
+
+import pydantic
+import typing_extensions
+
+from foundry_sdk import _core as core
+from foundry_sdk import _errors as errors
+from foundry_sdk.v2.core import models as core_models
+from foundry_sdk.v2.models import errors as models_errors
+from foundry_sdk.v2.models import models as models_models
+
+
+class ExperimentSeriesClient:
+ """
+ The API client for the ExperimentSeries Resource.
+
+ :param auth: Your auth configuration.
+ :param hostname: Your Foundry hostname (for example, "myfoundry.palantirfoundry.com"). This can also include your API gateway service URI.
+ :param config: Optionally specify the configuration for the HTTP session.
+ """
+
+ def __init__(
+ self,
+ auth: core.Auth,
+ hostname: str,
+ config: typing.Optional[core.Config] = None,
+ ):
+ self._auth = auth
+ self._hostname = hostname
+ self._config = config
+ self._api_client = core.ApiClient(auth=auth, hostname=hostname, config=config)
+
+ self.with_streaming_response = _ExperimentSeriesClientStreaming(self)
+ self.with_raw_response = _ExperimentSeriesClientRaw(self)
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def json(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ experiment_series_name: models_models.SeriesName,
+ *,
+ offset: typing.Optional[int] = None,
+ page_size: typing.Optional[core_models.PageSize] = None,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> models_models.Series:
+ """
+ Retrieve raw time-series data for a single series in JSON format.
+ Results are paginated with a default page size of 200 and a maximum of 1000.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param experiment_series_name:
+ :type experiment_series_name: SeriesName
+ :param offset: Number of values to skip from the beginning. Defaults to 0.
+ :type offset: Optional[int]
+ :param page_size: Maximum number of values to return per page. Default is 200, maximum is 1000.
+ :type page_size: Optional[PageSize]
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: models_models.Series
+
+ :raises JsonExperimentSeriesPermissionDenied: Could not json the ExperimentSeries.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}/series/{experimentSeriesName}/json",
+ query_params={
+ "offset": offset,
+ "pageSize": page_size,
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ "experimentSeriesName": experiment_series_name,
+ },
+ header_params={
+ "Accept": "application/json",
+ },
+ body=None,
+ response_type=models_models.Series,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "JsonExperimentSeriesPermissionDenied": models_errors.JsonExperimentSeriesPermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def parquet(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ experiment_series_name: models_models.SeriesName,
+ *,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> core.TableResponse:
+ """
+ Retrieve raw time-series data for a single series as a streamed binary response in Apache Parquet format.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param experiment_series_name:
+ :type experiment_series_name: SeriesName
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: core.TableResponse
+
+
+ :raises ParquetExperimentSeriesPermissionDenied: Could not parquet the ExperimentSeries.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}/series/{experimentSeriesName}/parquet",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ "experimentSeriesName": experiment_series_name,
+ },
+ header_params={
+ "Accept": "application/octet-stream",
+ },
+ body=None,
+ response_type=bytes,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "ParquetExperimentSeriesPermissionDenied": models_errors.ParquetExperimentSeriesPermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode", "PARQUET_TABLE"),
+ ),
+ )
+
+
+class _ExperimentSeriesClientRaw:
+ def __init__(self, client: ExperimentSeriesClient) -> None:
+ def json(_: models_models.Series): ...
+ def parquet(_: bytes): ...
+
+ self.json = core.with_raw_response(json, client.json)
+ self.parquet = core.with_raw_response(parquet, client.parquet)
+
+
+class _ExperimentSeriesClientStreaming:
+ def __init__(self, client: ExperimentSeriesClient) -> None:
+ def json(_: models_models.Series): ...
+ def parquet(_: bytes): ...
+
+ self.json = core.with_streaming_response(json, client.json)
+ self.parquet = core.with_streaming_response(parquet, client.parquet)
+
+
+class AsyncExperimentSeriesClient:
+ """
+ The API client for the ExperimentSeries Resource.
+
+ :param auth: Your auth configuration.
+ :param hostname: Your Foundry hostname (for example, "myfoundry.palantirfoundry.com"). This can also include your API gateway service URI.
+ :param config: Optionally specify the configuration for the HTTP session.
+ """
+
+ def __init__(
+ self,
+ auth: core.Auth,
+ hostname: str,
+ config: typing.Optional[core.Config] = None,
+ ):
+ self._auth = auth
+ self._hostname = hostname
+ self._config = config
+ self._api_client = core.AsyncApiClient(auth=auth, hostname=hostname, config=config)
+
+ self.with_streaming_response = _AsyncExperimentSeriesClientStreaming(self)
+ self.with_raw_response = _AsyncExperimentSeriesClientRaw(self)
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def json(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ experiment_series_name: models_models.SeriesName,
+ *,
+ offset: typing.Optional[int] = None,
+ page_size: typing.Optional[core_models.PageSize] = None,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> typing.Awaitable[models_models.Series]:
+ """
+ Retrieve raw time-series data for a single series in JSON format.
+ Results are paginated with a default page size of 200 and a maximum of 1000.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param experiment_series_name:
+ :type experiment_series_name: SeriesName
+ :param offset: Number of values to skip from the beginning. Defaults to 0.
+ :type offset: Optional[int]
+ :param page_size: Maximum number of values to return per page. Default is 200, maximum is 1000.
+ :type page_size: Optional[PageSize]
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: typing.Awaitable[models_models.Series]
+
+ :raises JsonExperimentSeriesPermissionDenied: Could not json the ExperimentSeries.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}/series/{experimentSeriesName}/json",
+ query_params={
+ "offset": offset,
+ "pageSize": page_size,
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ "experimentSeriesName": experiment_series_name,
+ },
+ header_params={
+ "Accept": "application/json",
+ },
+ body=None,
+ response_type=models_models.Series,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "JsonExperimentSeriesPermissionDenied": models_errors.JsonExperimentSeriesPermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def parquet(
+ self,
+ model_rid: models_models.ModelRid,
+ experiment_rid: models_models.ExperimentRid,
+ experiment_series_name: models_models.SeriesName,
+ *,
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> typing.Awaitable[core.TableResponse]:
+ """
+ Retrieve raw time-series data for a single series as a streamed binary response in Apache Parquet format.
+
+ :param model_rid:
+ :type model_rid: ModelRid
+ :param experiment_rid:
+ :type experiment_rid: ExperimentRid
+ :param experiment_series_name:
+ :type experiment_series_name: SeriesName
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: typing.Awaitable[core.TableResponse]
+
+
+ :raises ParquetExperimentSeriesPermissionDenied: Could not parquet the ExperimentSeries.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="GET",
+ resource_path="/v2/models/{modelRid}/experiments/{experimentRid}/series/{experimentSeriesName}/parquet",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "modelRid": model_rid,
+ "experimentRid": experiment_rid,
+ "experimentSeriesName": experiment_series_name,
+ },
+ header_params={
+ "Accept": "application/octet-stream",
+ },
+ body=None,
+ response_type=bytes,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "ParquetExperimentSeriesPermissionDenied": models_errors.ParquetExperimentSeriesPermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode", "PARQUET_TABLE"),
+ ),
+ )
+
+
+class _AsyncExperimentSeriesClientRaw:
+ def __init__(self, client: AsyncExperimentSeriesClient) -> None:
+ def json(_: models_models.Series): ...
+ def parquet(_: bytes): ...
+
+ self.json = core.async_with_raw_response(json, client.json)
+ self.parquet = core.async_with_raw_response(parquet, client.parquet)
+
+
+class _AsyncExperimentSeriesClientStreaming:
+ def __init__(self, client: AsyncExperimentSeriesClient) -> None:
+ def json(_: models_models.Series): ...
+ def parquet(_: bytes): ...
+
+ self.json = core.async_with_streaming_response(json, client.json)
+ self.parquet = core.async_with_streaming_response(parquet, client.parquet)
diff --git a/foundry_sdk/v2/models/live_deployment.py b/foundry_sdk/v2/models/live_deployment.py
new file mode 100644
index 00000000..c9c0142e
--- /dev/null
+++ b/foundry_sdk/v2/models/live_deployment.py
@@ -0,0 +1,213 @@
+# Copyright 2024 Palantir Technologies, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import typing
+
+import pydantic
+import typing_extensions
+
+from foundry_sdk import _core as core
+from foundry_sdk import _errors as errors
+from foundry_sdk.v2.core import models as core_models
+from foundry_sdk.v2.models import errors as models_errors
+from foundry_sdk.v2.models import models as models_models
+
+
+class LiveDeploymentClient:
+ """
+ The API client for the LiveDeployment Resource.
+
+ :param auth: Your auth configuration.
+ :param hostname: Your Foundry hostname (for example, "myfoundry.palantirfoundry.com"). This can also include your API gateway service URI.
+ :param config: Optionally specify the configuration for the HTTP session.
+ """
+
+ def __init__(
+ self,
+ auth: core.Auth,
+ hostname: str,
+ config: typing.Optional[core.Config] = None,
+ ):
+ self._auth = auth
+ self._hostname = hostname
+ self._config = config
+ self._api_client = core.ApiClient(auth=auth, hostname=hostname, config=config)
+
+ self.with_streaming_response = _LiveDeploymentClientStreaming(self)
+ self.with_raw_response = _LiveDeploymentClientRaw(self)
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def transform_json(
+ self,
+ live_deployment_rid: models_models.LiveDeploymentRid,
+ *,
+ input: typing.Dict[str, typing.Any],
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> models_models.TransformLiveDeploymentResponse:
+ """
+ Performs inference on the live deployment.
+
+ :param live_deployment_rid:
+ :type live_deployment_rid: LiveDeploymentRid
+ :param input: The input data for the model inference. The structure should match the model's transform API specification, where each key is an input name and the value is the corresponding input data.
+ :type input: Dict[str, Any]
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: models_models.TransformLiveDeploymentResponse
+
+ :raises TransformJsonLiveDeploymentPermissionDenied: Could not transformJson the LiveDeployment.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="POST",
+ resource_path="/v2/models/liveDeployments/{liveDeploymentRid}/transformJson",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "liveDeploymentRid": live_deployment_rid,
+ },
+ header_params={
+ "Content-Type": "application/json",
+ "Accept": "application/json",
+ },
+ body=models_models.TransformJsonLiveDeploymentRequest(
+ input=input,
+ ),
+ response_type=models_models.TransformLiveDeploymentResponse,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "TransformJsonLiveDeploymentPermissionDenied": models_errors.TransformJsonLiveDeploymentPermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+
+class _LiveDeploymentClientRaw:
+ def __init__(self, client: LiveDeploymentClient) -> None:
+ def transform_json(_: models_models.TransformLiveDeploymentResponse): ...
+
+ self.transform_json = core.with_raw_response(transform_json, client.transform_json)
+
+
+class _LiveDeploymentClientStreaming:
+ def __init__(self, client: LiveDeploymentClient) -> None:
+ def transform_json(_: models_models.TransformLiveDeploymentResponse): ...
+
+ self.transform_json = core.with_streaming_response(transform_json, client.transform_json)
+
+
+class AsyncLiveDeploymentClient:
+ """
+ The API client for the LiveDeployment Resource.
+
+ :param auth: Your auth configuration.
+ :param hostname: Your Foundry hostname (for example, "myfoundry.palantirfoundry.com"). This can also include your API gateway service URI.
+ :param config: Optionally specify the configuration for the HTTP session.
+ """
+
+ def __init__(
+ self,
+ auth: core.Auth,
+ hostname: str,
+ config: typing.Optional[core.Config] = None,
+ ):
+ self._auth = auth
+ self._hostname = hostname
+ self._config = config
+ self._api_client = core.AsyncApiClient(auth=auth, hostname=hostname, config=config)
+
+ self.with_streaming_response = _AsyncLiveDeploymentClientStreaming(self)
+ self.with_raw_response = _AsyncLiveDeploymentClientRaw(self)
+
+ @core.maybe_ignore_preview
+ @pydantic.validate_call
+ @errors.handle_unexpected
+ def transform_json(
+ self,
+ live_deployment_rid: models_models.LiveDeploymentRid,
+ *,
+ input: typing.Dict[str, typing.Any],
+ preview: typing.Optional[core_models.PreviewMode] = None,
+ request_timeout: typing.Optional[core.Timeout] = None,
+ _sdk_internal: core.SdkInternal = {},
+ ) -> typing.Awaitable[models_models.TransformLiveDeploymentResponse]:
+ """
+ Performs inference on the live deployment.
+
+ :param live_deployment_rid:
+ :type live_deployment_rid: LiveDeploymentRid
+ :param input: The input data for the model inference. The structure should match the model's transform API specification, where each key is an input name and the value is the corresponding input data.
+ :type input: Dict[str, Any]
+ :param preview: Enables the use of preview functionality.
+ :type preview: Optional[PreviewMode]
+ :param request_timeout: timeout setting for this request in seconds.
+ :type request_timeout: Optional[int]
+ :return: Returns the result object.
+ :rtype: typing.Awaitable[models_models.TransformLiveDeploymentResponse]
+
+ :raises TransformJsonLiveDeploymentPermissionDenied: Could not transformJson the LiveDeployment.
+ """
+
+ return self._api_client.call_api(
+ core.RequestInfo(
+ method="POST",
+ resource_path="/v2/models/liveDeployments/{liveDeploymentRid}/transformJson",
+ query_params={
+ "preview": preview,
+ },
+ path_params={
+ "liveDeploymentRid": live_deployment_rid,
+ },
+ header_params={
+ "Content-Type": "application/json",
+ "Accept": "application/json",
+ },
+ body=models_models.TransformJsonLiveDeploymentRequest(
+ input=input,
+ ),
+ response_type=models_models.TransformLiveDeploymentResponse,
+ request_timeout=request_timeout,
+ throwable_errors={
+ "TransformJsonLiveDeploymentPermissionDenied": models_errors.TransformJsonLiveDeploymentPermissionDenied,
+ },
+ response_mode=_sdk_internal.get("response_mode"),
+ ),
+ )
+
+
+class _AsyncLiveDeploymentClientRaw:
+ def __init__(self, client: AsyncLiveDeploymentClient) -> None:
+ def transform_json(_: models_models.TransformLiveDeploymentResponse): ...
+
+ self.transform_json = core.async_with_raw_response(transform_json, client.transform_json)
+
+
+class _AsyncLiveDeploymentClientStreaming:
+ def __init__(self, client: AsyncLiveDeploymentClient) -> None:
+ def transform_json(_: models_models.TransformLiveDeploymentResponse): ...
+
+ self.transform_json = core.async_with_streaming_response(
+ transform_json, client.transform_json
+ )
diff --git a/foundry_sdk/v2/models/model.py b/foundry_sdk/v2/models/model.py
index 6bcdb362..b5fb8ce5 100644
--- a/foundry_sdk/v2/models/model.py
+++ b/foundry_sdk/v2/models/model.py
@@ -50,6 +50,16 @@ def __init__(
self.with_streaming_response = _ModelClientStreaming(self)
self.with_raw_response = _ModelClientRaw(self)
+ @cached_property
+ def Experiment(self):
+ from foundry_sdk.v2.models.experiment import ExperimentClient
+
+ return ExperimentClient(
+ auth=self._auth,
+ hostname=self._hostname,
+ config=self._config,
+ )
+
@cached_property
def Version(self):
from foundry_sdk.v2.models.model_version import ModelVersionClient
@@ -203,6 +213,16 @@ def __init__(
self.with_streaming_response = _AsyncModelClientStreaming(self)
self.with_raw_response = _AsyncModelClientRaw(self)
+ @cached_property
+ def Experiment(self):
+ from foundry_sdk.v2.models.experiment import AsyncExperimentClient
+
+ return AsyncExperimentClient(
+ auth=self._auth,
+ hostname=self._hostname,
+ config=self._config,
+ )
+
@cached_property
def Version(self):
from foundry_sdk.v2.models.model_version import AsyncModelVersionClient
diff --git a/foundry_sdk/v2/models/models.py b/foundry_sdk/v2/models/models.py
index 039c40c6..5d444291 100644
--- a/foundry_sdk/v2/models/models.py
+++ b/foundry_sdk/v2/models/models.py
@@ -24,6 +24,14 @@
from foundry_sdk.v2.core import models as core_models
from foundry_sdk.v2.filesystem import models as filesystem_models
+
+class BooleanParameter(core.ModelBase):
+ """A boolean parameter value."""
+
+ value: bool
+ type: typing.Literal["boolean"] = "boolean"
+
+
ColumnTypeSpecId = str
"""An identifier for a column type specification."""
@@ -91,6 +99,13 @@ class DatasetInput(core.ModelBase):
type: typing.Literal["dataset"] = "dataset"
+class DatetimeParameter(core.ModelBase):
+ """A datetime parameter value."""
+
+ value: core.AwareDatetime
+ type: typing.Literal["datetime"] = "datetime"
+
+
class DillModelFiles(core.ModelBase):
"""DillModelFiles"""
@@ -98,10 +113,205 @@ class DillModelFiles(core.ModelBase):
type: typing.Literal["dill"] = "dill"
+class DoubleParameter(core.ModelBase):
+ """A double parameter value."""
+
+ value: float
+ type: typing.Literal["double"] = "double"
+
+
+class DoubleSeriesAggregations(core.ModelBase):
+ """Aggregated statistics for numeric series."""
+
+ min: float
+ """Minimum value in the series"""
+
+ max: float
+ """Maximum value in the series"""
+
+ last: float
+ """Most recent value in the series"""
+
+ type: typing.Literal["double"] = "double"
+
+
+class DoubleSeriesV1(core.ModelBase):
+ """A series of double values."""
+
+ series: typing.List[DoubleSeriesValueV1]
+ type: typing.Literal["doubleV1"] = "doubleV1"
+
+
+class DoubleSeriesValueV1(core.ModelBase):
+ """A single double value in a series."""
+
+ value: float
+ timestamp: EpochMillis
+ """Milliseconds since unix time zero"""
+
+ step: core.Long
+
+
+EpochMillis = core.Long
+"""
+Milliseconds since unix time zero. This representation is used to maintain consistency with the Parquet
+format.
+"""
+
+
+class Experiment(core.ModelBase):
+ """Experiment"""
+
+ rid: ExperimentRid
+ model_rid: ModelRid = pydantic.Field(alias=str("modelRid")) # type: ignore[literal-required]
+ name: ExperimentName
+ created_at: core_models.CreatedTime = pydantic.Field(alias=str("createdAt")) # type: ignore[literal-required]
+ created_by: core_models.CreatedBy = pydantic.Field(alias=str("createdBy")) # type: ignore[literal-required]
+ source: ExperimentSource
+ status: ExperimentStatus
+ status_message: typing.Optional[str] = pydantic.Field(alias=str("statusMessage"), default=None) # type: ignore[literal-required]
+ branch: ExperimentBranch
+ parameters: typing.List[Parameter]
+ series: typing.List[SeriesAggregations]
+ summary_metrics: typing.List[SummaryMetric] = pydantic.Field(alias=str("summaryMetrics")) # type: ignore[literal-required]
+ artifacts: typing.Dict[ExperimentArtifactName, ExperimentArtifactMetadata]
+ tags: typing.List[ExperimentTagText]
+ linked_model_version: typing.Optional[ModelVersionRid] = pydantic.Field(alias=str("linkedModelVersion"), default=None) # type: ignore[literal-required]
+ job_rid: typing.Optional[core_models.JobRid] = pydantic.Field(alias=str("jobRid"), default=None) # type: ignore[literal-required]
+
+
+class ExperimentArtifactMetadata(core.ModelBase):
+ """Metadata about an experiment artifact."""
+
+ name: ExperimentArtifactName
+ description: typing.Optional[str] = None
+ size_bytes: core_models.SizeBytes = pydantic.Field(alias=str("sizeBytes")) # type: ignore[literal-required]
+ details: ExperimentArtifactDetails
+
+
+ExperimentArtifactName = str
+"""The name of an experiment artifact."""
+
+
+class ExperimentAuthoringSource(core.ModelBase):
+ """Experiment created from an authoring repository."""
+
+ stemma_rid: core.RID = pydantic.Field(alias=str("stemmaRid")) # type: ignore[literal-required]
+ type: typing.Literal["authoring"] = "authoring"
+
+
+ExperimentBranch = str
+"""ExperimentBranch"""
+
+
+class ExperimentCodeWorkspaceSource(core.ModelBase):
+ """Experiment created from a code workspace."""
+
+ container_rid: core.RID = pydantic.Field(alias=str("containerRid")) # type: ignore[literal-required]
+ deployment_rid: typing.Optional[core.RID] = pydantic.Field(alias=str("deploymentRid"), default=None) # type: ignore[literal-required]
+ type: typing.Literal["codeWorkspace"] = "codeWorkspace"
+
+
+ExperimentName = str
+"""ExperimentName"""
+
+
+ExperimentRid = core.RID
+"""The Resource Identifier (RID) of an Experiment."""
+
+
+class ExperimentSdkSource(core.ModelBase):
+ """Experiment created from the SDK."""
+
+ type: typing.Literal["sdk"] = "sdk"
+
+
+ExperimentSource = typing_extensions.Annotated[
+ typing.Union[
+ "ExperimentCodeWorkspaceSource", "ExperimentAuthoringSource", "ExperimentSdkSource"
+ ],
+ pydantic.Field(discriminator="type"),
+]
+"""The source from which the experiment was created."""
+
+
+ExperimentStatus = typing.Literal["RUNNING", "SUCCEEDED", "FAILED"]
+"""The current status of an experiment."""
+
+
+ExperimentTagText = str
+"""A tag associated with an experiment."""
+
+
+class InconsistentArrayDimensionsError(core.ModelBase):
+ """Array elements have inconsistent dimensions."""
+
+ first_element_shape: typing.List[int] = pydantic.Field(alias=str("firstElementShape")) # type: ignore[literal-required]
+ """The shape of the first array element"""
+
+ conflicting_element_shape: typing.List[int] = pydantic.Field(alias=str("conflictingElementShape")) # type: ignore[literal-required]
+ """The shape of the conflicting array element"""
+
+ type: typing.Literal["inconsistentArrayDimensions"] = "inconsistentArrayDimensions"
+
+
+InferenceInputErrorType = typing_extensions.Annotated[
+ typing.Union[
+ "InvalidArrayShapeError",
+ "TypeMismatchError",
+ "UnsupportedTypeError",
+ "UnknownInputNameError",
+ "InvalidTabularFormatError",
+ "InconsistentArrayDimensionsError",
+ "RequiredValueMissingError",
+ "InvalidMapFormatError",
+ ],
+ pydantic.Field(discriminator="type"),
+]
+"""
+The specific type and details of an input validation error for inference requests.
+Each variant carries parameters relevant to that specific error category.
+"""
+
+
InputAlias = str
"""A string alias used to identify inputs in a Model Studio configuration."""
+class IntegerParameter(core.ModelBase):
+ """An integer parameter value."""
+
+ value: core.Long
+ type: typing.Literal["integer"] = "integer"
+
+
+class InvalidArrayShapeError(core.ModelBase):
+ """Array dimensions do not match expected ndarray shape."""
+
+ expected_shape: typing.List[int] = pydantic.Field(alias=str("expectedShape")) # type: ignore[literal-required]
+ """The expected array shape from the model API specification"""
+
+ actual_shape: typing.Optional[typing.List[int]] = pydantic.Field(alias=str("actualShape"), default=None) # type: ignore[literal-required]
+ """The actual shape of the provided array"""
+
+ type: typing.Literal["invalidArrayShape"] = "invalidArrayShape"
+
+
+class InvalidMapFormatError(core.ModelBase):
+ """Map input has incorrect structure or null keys."""
+
+ type: typing.Literal["invalidMapFormat"] = "invalidMapFormat"
+
+
+class InvalidTabularFormatError(core.ModelBase):
+ """Tabular input has incorrect JSON structure."""
+
+ input_field_name: str = pydantic.Field(alias=str("inputFieldName")) # type: ignore[literal-required]
+ """The name of the tabular input field with incorrect format"""
+
+ type: typing.Literal["invalidTabularFormat"] = "invalidTabularFormat"
+
+
class ListModelStudioConfigVersionsResponse(core.ModelBase):
"""ListModelStudioConfigVersionsResponse"""
@@ -130,6 +340,10 @@ class ListModelVersionsResponse(core.ModelBase):
next_page_token: typing.Optional[core_models.PageToken] = pydantic.Field(alias=str("nextPageToken"), default=None) # type: ignore[literal-required]
+LiveDeploymentRid = core.RID
+"""The Resource Identifier (RID) of a Live Deployment."""
+
+
class Model(core.ModelBase):
"""Model"""
@@ -415,6 +629,42 @@ class ModelVersion(core.ModelBase):
"""A string alias used to identify outputs in a Model Studio configuration."""
+class Parameter(core.ModelBase):
+ """A parameter with its name and value."""
+
+ name: ParameterName
+ """The parameter name"""
+
+ value: ParameterValue
+ """The parameter value"""
+
+
+ParameterName = str
+"""The name of an experiment parameter."""
+
+
+ParameterValue = typing_extensions.Annotated[
+ typing.Union[
+ "DatetimeParameter",
+ "BooleanParameter",
+ "StringParameter",
+ "DoubleParameter",
+ "IntegerParameter",
+ ],
+ pydantic.Field(discriminator="type"),
+]
+"""A parameter value logged for an experiment."""
+
+
+class RequiredValueMissingError(core.ModelBase):
+ """Required input field is null or missing."""
+
+ field_name: str = pydantic.Field(alias=str("fieldName")) # type: ignore[literal-required]
+ """The name of the required field that was null or missing"""
+
+ type: typing.Literal["requiredValueMissing"] = "requiredValueMissing"
+
+
class ResourceConfiguration(core.ModelBase):
"""Compute resource configuration for training runs."""
@@ -429,6 +679,265 @@ class ResourceConfiguration(core.ModelBase):
"""A unique identifier for a Model Studio run, derived from the studio, config, and build."""
+class SearchExperimentsAndFilter(core.ModelBase):
+ """Returns experiments where every filter is satisfied."""
+
+ filters: typing.List[SearchExperimentsFilter]
+ type: typing.Literal["and"] = "and"
+
+
+class SearchExperimentsContainsFilter(core.ModelBase):
+ """Filter for substring containment matches."""
+
+ field: SearchExperimentsContainsFilterField
+ value: typing.Any
+ type: typing.Literal["contains"] = "contains"
+
+
+SearchExperimentsContainsFilterField = typing.Literal[
+ "EXPERIMENT_NAME", "PARAMETER_NAME", "SERIES_NAME"
+]
+"""Fields that support substring containment filtering."""
+
+
+class SearchExperimentsEqualsFilter(core.ModelBase):
+ """Filter for exact field value matches."""
+
+ field: SearchExperimentsEqualsFilterField
+ value: typing.Any
+ type: typing.Literal["eq"] = "eq"
+
+
+SearchExperimentsEqualsFilterField = typing.Literal[
+ "STATUS",
+ "BRANCH",
+ "EXPERIMENT_NAME",
+ "EXPERIMENT_RID",
+ "JOB_RID",
+ "TAG",
+ "PARAMETER_NAME",
+ "SERIES_NAME",
+]
+"""Fields that support equality filtering."""
+
+
+SearchExperimentsFilter = typing_extensions.Annotated[
+ typing.Union[
+ "SearchExperimentsSeriesFilter",
+ "SearchExperimentsContainsFilter",
+ "SearchExperimentsNotFilter",
+ "SearchExperimentsOrFilter",
+ "SearchExperimentsAndFilter",
+ "SearchExperimentsParameterFilter",
+ "SearchExperimentsSummaryMetricFilter",
+ "SearchExperimentsEqualsFilter",
+ "SearchExperimentsStartsWithFilter",
+ ],
+ pydantic.Field(discriminator="type"),
+]
+"""
+Filter for searching experiments using operator-based composition.
+Supports equality, text matching, boolean combination operators, and compound filters
+that atomically bind a name to a value comparison.
+
+Example filters:
+- Simple status: {"eq": {"field": "STATUS", "value": "RUNNING"}}
+- Branch match: {"eq": {"field": "BRANCH", "value": "master"}}
+- Parameter filter: {"parameterFilter": {"parameterName": "learning_rate", "operator": "GT", "value": 0.01}}
+- Combined: {"and": {"filters": [
+ {"eq": {"field": "STATUS", "value": "SUCCEEDED"}},
+ {"parameterFilter": {"parameterName": "learning_rate", "operator": "GT", "value": 0.5}}
+ ]}}
+"""
+
+
+SearchExperimentsFilterOperator = typing.Literal["EQ", "GT", "LT", "CONTAINS"]
+"""Comparison operator for compound filter predicates."""
+
+
+class SearchExperimentsNotFilter(core.ModelBase):
+ """Returns experiments where the filter is not satisfied."""
+
+ value: SearchExperimentsFilter
+ type: typing.Literal["not"] = "not"
+
+
+class SearchExperimentsOrFilter(core.ModelBase):
+ """Returns experiments where at least one filter is satisfied."""
+
+ filters: typing.List[SearchExperimentsFilter]
+ type: typing.Literal["or"] = "or"
+
+
+class SearchExperimentsOrderBy(core.ModelBase):
+ """Ordering configuration for experiment search results."""
+
+ field: SearchExperimentsOrderByField
+ direction: core_models.OrderByDirection
+
+
+SearchExperimentsOrderByField = typing.Literal["EXPERIMENT_NAME", "CREATED_AT"]
+"""Fields to order experiment search results by."""
+
+
+class SearchExperimentsParameterFilter(core.ModelBase):
+ """
+ Filter that atomically binds a parameter name to a value comparison,
+ ensuring both conditions are evaluated on the same parameter.
+ Supported combinations:
+ - EQ: boolean, double, integer, datetime, or string value
+ - GT/LT: double, integer, or datetime value
+ - CONTAINS: string value (matches the parameter's string value)
+ """
+
+ parameter_name: ParameterName = pydantic.Field(alias=str("parameterName")) # type: ignore[literal-required]
+ """The exact name of the parameter to filter on."""
+
+ operator: SearchExperimentsFilterOperator
+ """The comparison operator to apply."""
+
+ value: typing.Any
+ """The value to compare against."""
+
+ type: typing.Literal["parameterFilter"] = "parameterFilter"
+
+
+class SearchExperimentsRequest(core.ModelBase):
+ """SearchExperimentsRequest"""
+
+ where: typing.Optional[SearchExperimentsFilter] = None
+    """Optional filter to narrow the experiments returned. If not provided, all experiments for the model are returned."""
+
+ order_by: typing.Optional[SearchExperimentsOrderBy] = pydantic.Field(alias=str("orderBy"), default=None) # type: ignore[literal-required]
+ """The field to sort by. Default is to sort by relevance."""
+
+ page_size: typing.Optional[core_models.PageSize] = pydantic.Field(alias=str("pageSize"), default=None) # type: ignore[literal-required]
+ """The maximum number of results to return. Default 50, maximum of 100."""
+
+ page_token: typing.Optional[core_models.PageToken] = pydantic.Field(alias=str("pageToken"), default=None) # type: ignore[literal-required]
+ """PageToken to identify the next page to retrieve. Leave empty for the first request."""
+
+
+class SearchExperimentsResponse(core.ModelBase):
+ """Response from searching experiments."""
+
+ data: typing.List[Experiment]
+ """List of experiments matching the search criteria."""
+
+ next_page_token: typing.Optional[core_models.PageToken] = pydantic.Field(alias=str("nextPageToken"), default=None) # type: ignore[literal-required]
+ """Token for retrieving the next page of results, if more results are available."""
+
+
+class SearchExperimentsSeriesFilter(core.ModelBase):
+ """
+ Filter that atomically binds a series name to a metric comparison,
+ ensuring all conditions are evaluated on the same series.
+ """
+
+ series_name: SeriesName = pydantic.Field(alias=str("seriesName")) # type: ignore[literal-required]
+ """The name of the series to filter on."""
+
+ field: SearchExperimentsSeriesFilterField
+ """The series metric to compare."""
+
+ operator: SearchExperimentsFilterOperator
+ """The comparison operator (EQ, GT, or LT)."""
+
+ value: typing.Any
+ """The value to compare against."""
+
+ type: typing.Literal["seriesFilter"] = "seriesFilter"
+
+
+SearchExperimentsSeriesFilterField = typing.Literal[
+ "LENGTH", "AGGREGATION_MIN", "AGGREGATION_MAX", "AGGREGATION_LAST"
+]
+"""The series metric to filter on."""
+
+
+class SearchExperimentsStartsWithFilter(core.ModelBase):
+ """Filter for prefix matches."""
+
+ field: SearchExperimentsStartsWithFilterField
+ value: typing.Any
+ type: typing.Literal["startsWith"] = "startsWith"
+
+
+SearchExperimentsStartsWithFilterField = typing.Literal[
+ "EXPERIMENT_NAME", "PARAMETER_NAME", "SERIES_NAME"
+]
+"""Fields that support prefix filtering."""
+
+
+class SearchExperimentsSummaryMetricFilter(core.ModelBase):
+ """
+ Filter that atomically binds a series name and aggregation type to a value comparison,
+ ensuring all conditions are evaluated on the same summary metric.
+ """
+
+ series_name: SeriesName = pydantic.Field(alias=str("seriesName")) # type: ignore[literal-required]
+ """The name of the series this metric belongs to."""
+
+ aggregation: SummaryMetricAggregation
+ """The aggregation type (MIN, MAX, LAST)."""
+
+ operator: SearchExperimentsFilterOperator
+ """The comparison operator (EQ, GT, or LT)."""
+
+ value: typing.Any
+ """The value to compare against."""
+
+ type: typing.Literal["summaryMetricFilter"] = "summaryMetricFilter"
+
+
+class SeriesAggregations(core.ModelBase):
+ """Series with precomputed aggregation values."""
+
+ name: SeriesName
+ """The series name"""
+
+ length: core.Long
+ """Number of values in the series"""
+
+ value: SeriesAggregationsValue
+ """Aggregated values for this series"""
+
+
+SeriesName = str
+"""The name of a series (metrics tracked over time)."""
+
+
+class StringParameter(core.ModelBase):
+ """A string parameter value."""
+
+ value: str
+ type: typing.Literal["string"] = "string"
+
+
+class SummaryMetric(core.ModelBase):
+ """A summary metric with series name, aggregation type, and computed value."""
+
+ series_name: SeriesName = pydantic.Field(alias=str("seriesName")) # type: ignore[literal-required]
+ """Name of the series this metric belongs to"""
+
+ aggregation: SummaryMetricAggregation
+ """Type of aggregation (MIN, MAX, LAST)"""
+
+ value: float
+ """The computed value"""
+
+
+SummaryMetricAggregation = typing.Literal["MIN", "MAX", "LAST"]
+"""The type of aggregation computed for a summary metric."""
+
+
+class TableArtifactDetails(core.ModelBase):
+ """Details about a table artifact."""
+
+ row_count: core.Long = pydantic.Field(alias=str("rowCount")) # type: ignore[literal-required]
+ type: typing.Literal["table"] = "table"
+
+
TrainerDescription = str
"""Description of what a trainer does and its capabilities."""
@@ -468,6 +977,54 @@ class TrainerVersionLocator(core.ModelBase):
version: str
+class TransformJsonLiveDeploymentRequest(core.ModelBase):
+ """TransformJsonLiveDeploymentRequest"""
+
+ input: typing.Dict[str, typing.Any]
+ """The input data for the model inference. The structure should match the model's transform API specification, where each key is an input name and the value is the corresponding input data."""
+
+
+class TransformLiveDeploymentResponse(core.ModelBase):
+ """The response from transforming input data using a live deployment."""
+
+ output: typing.Dict[str, typing.Any]
+ """The output data from the model inference. The structure depends on the model's defined API specification, where each key is an output name and the value is the corresponding output data."""
+
+
+class TypeMismatchError(core.ModelBase):
+ """Input type does not match expected type in model API."""
+
+ expected_type: str = pydantic.Field(alias=str("expectedType")) # type: ignore[literal-required]
+ """The expected type from the model API specification"""
+
+ actual_type: str = pydantic.Field(alias=str("actualType")) # type: ignore[literal-required]
+ """The actual type provided in the input"""
+
+ type: typing.Literal["typeMismatch"] = "typeMismatch"
+
+
+class UnknownInputNameError(core.ModelBase):
+ """Provided input name not found in model API specification."""
+
+ input_name: str = pydantic.Field(alias=str("inputName")) # type: ignore[literal-required]
+ """The input name that was not found in the model API specification"""
+
+ type: typing.Literal["unknownInputName"] = "unknownInputName"
+
+
+class UnsupportedTypeError(core.ModelBase):
+ """Input contains an unsupported data type."""
+
+ unsupported_type: str = pydantic.Field(alias=str("unsupportedType")) # type: ignore[literal-required]
+ """The unsupported data type"""
+
+ type: typing.Literal["unsupportedType"] = "unsupportedType"
+
+
+ExperimentArtifactDetails = TableArtifactDetails
+"""Details about an experiment artifact."""
+
+
ModelFiles = DillModelFiles
"""
The serialized data of a machine learning model. This can include the model's parameters, architecture, and any other relevant information needed to reconstruct the model.
@@ -487,23 +1044,62 @@ class TrainerVersionLocator(core.ModelBase):
"""Resolved output details for a Model Studio run."""
+Series = DoubleSeriesV1
+"""A series of values logged over time."""
+
+
+SeriesAggregationsValue = DoubleSeriesAggregations
+"""Union of aggregation values by series type."""
+
+
+core.resolve_forward_references(ExperimentSource, globalns=globals(), localns=locals())
+core.resolve_forward_references(InferenceInputErrorType, globalns=globals(), localns=locals())
core.resolve_forward_references(ModelApiDataType, globalns=globals(), localns=locals())
core.resolve_forward_references(ModelApiInput, globalns=globals(), localns=locals())
core.resolve_forward_references(ModelApiOutput, globalns=globals(), localns=locals())
+core.resolve_forward_references(ParameterValue, globalns=globals(), localns=locals())
+core.resolve_forward_references(SearchExperimentsFilter, globalns=globals(), localns=locals())
__all__ = [
+ "BooleanParameter",
"ColumnTypeSpecId",
"CreateModelRequest",
"CreateModelStudioConfigVersionRequest",
"CreateModelStudioRequest",
"CreateModelVersionRequest",
"DatasetInput",
+ "DatetimeParameter",
"DillModelFiles",
+ "DoubleParameter",
+ "DoubleSeriesAggregations",
+ "DoubleSeriesV1",
+ "DoubleSeriesValueV1",
+ "EpochMillis",
+ "Experiment",
+ "ExperimentArtifactDetails",
+ "ExperimentArtifactMetadata",
+ "ExperimentArtifactName",
+ "ExperimentAuthoringSource",
+ "ExperimentBranch",
+ "ExperimentCodeWorkspaceSource",
+ "ExperimentName",
+ "ExperimentRid",
+ "ExperimentSdkSource",
+ "ExperimentSource",
+ "ExperimentStatus",
+ "ExperimentTagText",
+ "InconsistentArrayDimensionsError",
+ "InferenceInputErrorType",
"InputAlias",
+ "IntegerParameter",
+ "InvalidArrayShapeError",
+ "InvalidMapFormatError",
+ "InvalidTabularFormatError",
"ListModelStudioConfigVersionsResponse",
"ListModelStudioRunsResponse",
"ListModelStudioTrainersResponse",
"ListModelVersionsResponse",
+ "LiveDeploymentRid",
"Model",
"ModelApi",
"ModelApiAnyType",
@@ -538,8 +1134,39 @@ class TrainerVersionLocator(core.ModelBase):
"ModelVersion",
"ModelVersionRid",
"OutputAlias",
+ "Parameter",
+ "ParameterName",
+ "ParameterValue",
+ "RequiredValueMissingError",
"ResourceConfiguration",
"RunId",
+ "SearchExperimentsAndFilter",
+ "SearchExperimentsContainsFilter",
+ "SearchExperimentsContainsFilterField",
+ "SearchExperimentsEqualsFilter",
+ "SearchExperimentsEqualsFilterField",
+ "SearchExperimentsFilter",
+ "SearchExperimentsFilterOperator",
+ "SearchExperimentsNotFilter",
+ "SearchExperimentsOrFilter",
+ "SearchExperimentsOrderBy",
+ "SearchExperimentsOrderByField",
+ "SearchExperimentsParameterFilter",
+ "SearchExperimentsRequest",
+ "SearchExperimentsResponse",
+ "SearchExperimentsSeriesFilter",
+ "SearchExperimentsSeriesFilterField",
+ "SearchExperimentsStartsWithFilter",
+ "SearchExperimentsStartsWithFilterField",
+ "SearchExperimentsSummaryMetricFilter",
+ "Series",
+ "SeriesAggregations",
+ "SeriesAggregationsValue",
+ "SeriesName",
+ "StringParameter",
+ "SummaryMetric",
+ "SummaryMetricAggregation",
+ "TableArtifactDetails",
"TrainerDescription",
"TrainerId",
"TrainerInputsSpecification",
@@ -549,4 +1176,9 @@ class TrainerVersionLocator(core.ModelBase):
"TrainerType",
"TrainerVersion",
"TrainerVersionLocator",
+ "TransformJsonLiveDeploymentRequest",
+ "TransformLiveDeploymentResponse",
+ "TypeMismatchError",
+ "UnknownInputNameError",
+ "UnsupportedTypeError",
]
diff --git a/foundry_sdk/v2/ontologies/models.py b/foundry_sdk/v2/ontologies/models.py
index ff2b381e..d841f254 100644
--- a/foundry_sdk/v2/ontologies/models.py
+++ b/foundry_sdk/v2/ontologies/models.py
@@ -623,6 +623,7 @@ class BoundingBoxValue(core.ModelBase):
top_left: WithinBoundingBoxPoint = pydantic.Field(alias=str("topLeft")) # type: ignore[literal-required]
bottom_right: WithinBoundingBoxPoint = pydantic.Field(alias=str("bottomRight")) # type: ignore[literal-required]
+ type: typing.Literal["envelope"] = "envelope"
class CenterPoint(core.ModelBase):
@@ -1364,6 +1365,38 @@ class FuzzyRule(core.ModelBase):
"""Setting fuzzy to `true` allows approximate matching in search queries that support it."""
+class GeoJsonString(core.ModelBase):
+ """A GeoJSON geometry specification."""
+
+ geo_json: str = pydantic.Field(alias=str("geoJson")) # type: ignore[literal-required]
+ """
+ A GeoJSON geometry string. Supported geometry types include Point, MultiPoint, LineString,
+ MultiLineString, Polygon, MultiPolygon, and GeometryCollection.
+ """
+
+ type: typing.Literal["geoJson"] = "geoJson"
+
+
+GeoShapeV2Geometry = typing_extensions.Annotated[
+ typing.Union["BoundingBoxValue", "GeoJsonString"], pydantic.Field(discriminator="type")
+]
+"""Geometry specification for a GeoShapeV2Query. Supports bounding box envelopes and arbitrary GeoJSON geometries."""
+
+
+class GeoShapeV2Query(core.ModelBase):
+ """
+ Returns objects where the specified field satisfies the provided geometry query with the given spatial operator.
+ Supports both envelope (bounding box) and GeoJSON geometries for filtering geopoint or geoshape properties.
+ Either `field` or `propertyIdentifier` can be supplied, but not both.
+ """
+
+ field: typing.Optional[PropertyApiName] = None
+ property_identifier: typing.Optional[PropertyIdentifier] = pydantic.Field(alias=str("propertyIdentifier"), default=None) # type: ignore[literal-required]
+ geometry: GeoShapeV2Geometry
+ spatial_filter_mode: SpatialFilterMode = pydantic.Field(alias=str("spatialFilterMode")) # type: ignore[literal-required]
+ type: typing.Literal["geoShapeV2"] = "geoShapeV2"
+
+
class GeotemporalSeriesEntry(core.ModelBase):
"""A single geotemporal data point representing the location of an entity at a specific point in time."""
@@ -3094,6 +3127,7 @@ class OntologyArrayType(core.ModelBase):
core_models.CipherTextType,
core_models.MarkingType,
core_models.UnsupportedType,
+ core_models.MediaReferenceType,
"OntologyArrayType",
"OntologyObjectSetType",
core_models.BinaryType,
@@ -3784,6 +3818,7 @@ class QueryArrayType(core.ModelBase):
core_models.BooleanType,
core_models.UnsupportedType,
core_models.AttachmentType,
+ core_models.MediaReferenceType,
core_models.NullType,
"QueryArrayType",
"OntologyObjectSetType",
@@ -4095,6 +4130,7 @@ class RollingAggregateWindowPoints(core.ModelBase):
"IsNullQueryV2",
"ContainsAnyTermQuery",
"IntervalQuery",
+ "GeoShapeV2Query",
"StartsWithQuery",
],
pydantic.Field(discriminator="type"),
@@ -4384,6 +4420,14 @@ class SharedPropertyType(core.ModelBase):
"""The unique resource identifier of an shared property type, useful for interacting with other Foundry APIs."""
+SpatialFilterMode = typing.Literal["INTERSECTS", "DISJOINT", "WITHIN", "CONTAINS"]
+"""
+The spatial relation operator for a GeoShapeV2Query. INTERSECTS matches objects that intersect the provided
+geometry, DISJOINT matches objects that do not intersect the provided geometry, WITHIN matches objects that
+lie within the provided geometry, and CONTAINS matches objects that contain the provided geometry.
+"""
+
+
class StartsWithQuery(core.ModelBase):
"""
Deprecated alias for `containsAllTermsInOrderPrefixLastTerm`, which is preferred because the name `startsWith` is misleading.
@@ -5186,6 +5230,7 @@ class WithinPolygonQuery(core.ModelBase):
core.resolve_forward_references(DurationFormatStyle, globalns=globals(), localns=locals())
core.resolve_forward_references(EditHistoryEdit, globalns=globals(), localns=locals())
core.resolve_forward_references(EditsHistoryFilter, globalns=globals(), localns=locals())
+core.resolve_forward_references(GeoShapeV2Geometry, globalns=globals(), localns=locals())
core.resolve_forward_references(
InterfaceLinkTypeLinkedEntityApiName, globalns=globals(), localns=locals()
)
@@ -5416,6 +5461,9 @@ class WithinPolygonQuery(core.ModelBase):
"FunctionVersion",
"FuzzyRule",
"FuzzyV2",
+ "GeoJsonString",
+ "GeoShapeV2Geometry",
+ "GeoShapeV2Query",
"GeotemporalSeriesEntry",
"GeotimeSeriesValue",
"GetSelectedPropertyOperation",
@@ -5712,6 +5760,7 @@ class WithinPolygonQuery(core.ModelBase):
"SharedPropertyType",
"SharedPropertyTypeApiName",
"SharedPropertyTypeRid",
+ "SpatialFilterMode",
"StartsWithQuery",
"StaticArgument",
"StreamGeotemporalSeriesValuesRequest",
diff --git a/foundry_sdk/v2/sql_queries/sql_query.py b/foundry_sdk/v2/sql_queries/sql_query.py
index c8c109c8..488edae5 100644
--- a/foundry_sdk/v2/sql_queries/sql_query.py
+++ b/foundry_sdk/v2/sql_queries/sql_query.py
@@ -289,7 +289,7 @@ def get_results(
"QueryRunning": sql_queries_errors.QueryRunning,
"ReadQueryInputsPermissionDenied": sql_queries_errors.ReadQueryInputsPermissionDenied,
},
- response_mode=_sdk_internal.get("response_mode", "TABLE"),
+ response_mode=_sdk_internal.get("response_mode", "ARROW_TABLE"),
),
)
@@ -644,7 +644,7 @@ def get_results(
"QueryRunning": sql_queries_errors.QueryRunning,
"ReadQueryInputsPermissionDenied": sql_queries_errors.ReadQueryInputsPermissionDenied,
},
- response_mode=_sdk_internal.get("response_mode", "TABLE"),
+ response_mode=_sdk_internal.get("response_mode", "ARROW_TABLE"),
),
)
diff --git a/tests/test_resorce_import.py b/tests/test_resorce_import.py
index f49f62da..dc015927 100644
--- a/tests/test_resorce_import.py
+++ b/tests/test_resorce_import.py
@@ -366,6 +366,32 @@ def test_media_sets_v2_media_set_import():
assert MediaSetClient is not None
+def test_models_v2_experiment_import():
+ from foundry_sdk.v2.models.experiment import ExperimentClient
+
+ assert ExperimentClient is not None
+
+
+def test_models_v2_experiment_artifact_table_import():
+ from foundry_sdk.v2.models.experiment_artifact_table import (
+ ExperimentArtifactTableClient,
+ ) # NOQA
+
+ assert ExperimentArtifactTableClient is not None
+
+
+def test_models_v2_experiment_series_import():
+ from foundry_sdk.v2.models.experiment_series import ExperimentSeriesClient
+
+ assert ExperimentSeriesClient is not None
+
+
+def test_models_v2_live_deployment_import():
+ from foundry_sdk.v2.models.live_deployment import LiveDeploymentClient
+
+ assert LiveDeploymentClient is not None
+
+
def test_models_v2_model_import():
from foundry_sdk.v2.models.model import ModelClient