From c97b57b68db3b202789728118ef3a8f700c6e052 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 02:16:02 +0000 Subject: [PATCH 1/7] codegen metadata --- .stats.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.stats.yml b/.stats.yml index 4f98060..84b1878 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 2 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-d58ccd91625a3b12fd8d1ceece128b604010bd840096000287c927cb5dcf79eb.yml -openapi_spec_hash: 22c8c973d55f26649e9df96c89ea537f +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-f018b83ac489bceb321bc988b6367539365fb59a70fe031f5768d5b50a3ceffa.yml +openapi_spec_hash: e2cecb1f8f97c362436925e6750fed9d config_hash: 1d603d50b7183a492ad6df5f728a1863 From b6b9bb3029d5c3e3c795e430989d3d9177027aba Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 02:32:41 +0000 Subject: [PATCH 2/7] codegen metadata --- .stats.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.stats.yml b/.stats.yml index 84b1878..5e1a5ef 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 2 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-f018b83ac489bceb321bc988b6367539365fb59a70fe031f5768d5b50a3ceffa.yml -openapi_spec_hash: e2cecb1f8f97c362436925e6750fed9d +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-c224b4437a8a19d29aa29506fdb44ffc5316d5ba3600feee90678acdb2557e23.yml +openapi_spec_hash: 6b74504e14f7e1aa631e6bd76d596f48 config_hash: 1d603d50b7183a492ad6df5f728a1863 From 2f992e788860d16739438a021bd8825a7999b1e4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 02:30:28 +0000 Subject: [PATCH 3/7] chore(internal): bump pyright version --- pyproject.toml | 2 +- requirements-dev.lock | 2 +- src/isaacus/_base_client.py | 6 +++++- src/isaacus/_models.py | 1 - src/isaacus/_utils/_typing.py | 2 +- tests/conftest.py | 2 +- tests/test_models.py | 2 +- 7 files changed, 10 insertions(+), 7 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1e7ab52..01f9e97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ Repository = "https://github.com/isaacus-dev/isaacus-python" managed = true # version pins are in requirements-dev.lock dev-dependencies = [ - "pyright>=1.1.359", + "pyright==1.1.399", "mypy", "respx", "pytest", diff --git a/requirements-dev.lock b/requirements-dev.lock index 898de03..8394bf4 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -69,7 +69,7 @@ pydantic-core==2.27.1 # via pydantic pygments==2.18.0 # via rich -pyright==1.1.392.post0 +pyright==1.1.399 pytest==8.3.3 # via pytest-asyncio pytest-asyncio==0.24.0 diff --git a/src/isaacus/_base_client.py b/src/isaacus/_base_client.py index 4353bb5..34d082e 100644 --- a/src/isaacus/_base_client.py +++ b/src/isaacus/_base_client.py @@ -98,7 +98,11 @@ _AsyncStreamT = TypeVar("_AsyncStreamT", bound=AsyncStream[Any]) if TYPE_CHECKING: - from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT + from httpx._config import ( + DEFAULT_TIMEOUT_CONFIG, # pyright: ignore[reportPrivateImportUsage] + ) + + HTTPX_DEFAULT_TIMEOUT = DEFAULT_TIMEOUT_CONFIG else: try: 
from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT diff --git a/src/isaacus/_models.py b/src/isaacus/_models.py index 3493571..58b9263 100644 --- a/src/isaacus/_models.py +++ b/src/isaacus/_models.py @@ -19,7 +19,6 @@ ) import pydantic -import pydantic.generics from pydantic.fields import FieldInfo from ._types import ( diff --git a/src/isaacus/_utils/_typing.py b/src/isaacus/_utils/_typing.py index 1958820..1bac954 100644 --- a/src/isaacus/_utils/_typing.py +++ b/src/isaacus/_utils/_typing.py @@ -110,7 +110,7 @@ class MyResponse(Foo[_T]): ``` """ cls = cast(object, get_origin(typ) or typ) - if cls in generic_bases: + if cls in generic_bases: # pyright: ignore[reportUnnecessaryContains] # we're given the class directly return extract_type_arg(typ, index) diff --git a/tests/conftest.py b/tests/conftest.py index bdae75a..31f2add 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,7 +10,7 @@ from isaacus import Isaacus, AsyncIsaacus if TYPE_CHECKING: - from _pytest.fixtures import FixtureRequest + from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage] pytest.register_assert_rewrite("tests.utils") diff --git a/tests/test_models.py b/tests/test_models.py index f465b46..f38887a 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -832,7 +832,7 @@ class B(BaseModel): @pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") def test_type_alias_type() -> None: - Alias = TypeAliasType("Alias", str) + Alias = TypeAliasType("Alias", str) # pyright: ignore class Model(BaseModel): alias: Alias From bb3df7823dd27e6482b5e97ef17019ee0a1e596c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 02:34:03 +0000 Subject: [PATCH 4/7] chore(internal): update models test --- tests/test_models.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_models.py b/tests/test_models.py index f38887a..ff05998 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -492,12 +492,15 @@ class Model(BaseModel): resource_id: Optional[str] = None m = Model.construct() + assert m.resource_id is None assert "resource_id" not in m.model_fields_set m = Model.construct(resource_id=None) + assert m.resource_id is None assert "resource_id" in m.model_fields_set m = Model.construct(resource_id="foo") + assert m.resource_id == "foo" assert "resource_id" in m.model_fields_set From 4fb2535407d88d51c1db1e9a37c9ea767cdf06c0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 03:42:42 +0000 Subject: [PATCH 5/7] feat(api)!: made universal classification endpoint multi-input only --- .stats.yml | 4 +- README.md | 22 ++++----- .../resources/classifications/universal.py | 46 +++++++++--------- .../universal_classification.py | 48 ++++++++++++++----- .../universal_create_params.py | 20 ++++---- .../classifications/test_universal.py | 16 +++---- tests/test_client.py | 20 ++++---- 7 files changed, 100 insertions(+), 76 deletions(-) diff --git a/.stats.yml b/.stats.yml index 5e1a5ef..d6a9380 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 2 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-c224b4437a8a19d29aa29506fdb44ffc5316d5ba3600feee90678acdb2557e23.yml -openapi_spec_hash: 6b74504e14f7e1aa631e6bd76d596f48 +openapi_spec_url: 
https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-213d554b23f35e746460af23dd32bdde471230549ad223518c86d42ea917a180.yml +openapi_spec_hash: 3672281fe031a42fc59e3a2af758a8f8 config_hash: 1d603d50b7183a492ad6df5f728a1863 diff --git a/README.md b/README.md index b7c2c41..20666b5 100644 --- a/README.md +++ b/README.md @@ -34,9 +34,9 @@ client = Isaacus( universal_classification = client.classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) -print(universal_classification.chunks) +print(universal_classification.classifications) ``` While you can provide an `api_key` keyword argument, @@ -62,9 +62,9 @@ async def main() -> None: universal_classification = await client.classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) - print(universal_classification.chunks) + print(universal_classification.classifications) asyncio.run(main()) @@ -93,7 +93,7 @@ client = Isaacus() universal_classification = client.classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], chunking_options={ "overlap_ratio": 0.1, "overlap_tokens": None, @@ -122,7 +122,7 @@ try: client.classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) except isaacus.APIConnectionError as e: print("The server could not be reached") @@ -169,7 +169,7 @@ client = Isaacus( client.with_options(max_retries=5).classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) ``` @@ -196,7 +196,7 @@ client = Isaacus( client.with_options(timeout=5.0).classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) ``` @@ -241,12 +241,12 @@ client = Isaacus() response = client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) print(response.headers.get('X-My-Header')) universal = response.parse() # get the object that `classifications.universal.create()` would have returned -print(universal.chunks) +print(universal.classifications) ``` These methods return an [`APIResponse`](https://github.com/isaacus-dev/isaacus-python/tree/main/src/isaacus/_response.py) object. 
@@ -263,7 +263,7 @@ To stream the response body, use `.with_streaming_response` instead, which requi with client.classifications.universal.with_streaming_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) as response: print(response.headers.get("X-My-Header")) diff --git a/src/isaacus/resources/classifications/universal.py b/src/isaacus/resources/classifications/universal.py index 7342fa3..fd3c76b 100644 --- a/src/isaacus/resources/classifications/universal.py +++ b/src/isaacus/resources/classifications/universal.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Optional +from typing import List, Optional from typing_extensions import Literal import httpx @@ -52,7 +52,7 @@ def create( *, model: Literal["kanon-universal-classifier", "kanon-universal-classifier-mini"], query: str, - text: str, + texts: List[str], chunking_options: Optional[universal_create_params.ChunkingOptions] | NotGiven = NOT_GIVEN, is_iql: bool | NotGiven = NOT_GIVEN, scoring_method: Literal["auto", "chunk_max", "chunk_avg", "chunk_min"] | NotGiven = NOT_GIVEN, @@ -64,7 +64,7 @@ def create( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> UniversalClassification: """ - Classify the relevance of a legal document to a query with an Isaacus universal + Classify the relevance of legal documents to a query with an Isaacus universal legal AI classifier. Args: @@ -72,16 +72,16 @@ def create( to use for universal classification. query: The [Isaacus Query Language (IQL)](https://docs.isaacus.com/iql) query or, if - IQL is disabled, the statement, to evaluate the text against. + IQL is disabled, the statement, to evaluate the texts against. The query must contain at least one non-whitespace character. - Unlike the text being classified, the query cannot be so long that it exceeds + Unlike the texts being classified, the query cannot be so long that it exceeds the maximum input length of the universal classifier. - text: The text to classify. + texts: The texts to classify. - The text must contain at least one non-whitespace character. + The texts must contain at least one non-whitespace character. chunking_options: Options for how to split text into smaller chunks. @@ -92,13 +92,13 @@ def create( `auto` is the default scoring method and is recommended for most use cases. Currently, it is equivalent to `chunk_max`. In the future, it will automatically - select the best method based on the model and input. + select the best method based on the model and inputs. - `chunk_max` uses the highest confidence score of all of the text's chunks. + `chunk_max` uses the highest confidence score of all of the texts' chunks. - `chunk_avg` averages the confidence scores of all of the text's chunks. + `chunk_avg` averages the confidence scores of all of the texts' chunks. - `chunk_min` uses the lowest confidence score of all of the text's chunks. + `chunk_min` uses the lowest confidence score of all of the texts' chunks. 
extra_headers: Send extra headers @@ -114,7 +114,7 @@ def create( { "model": model, "query": query, - "text": text, + "texts": texts, "chunking_options": chunking_options, "is_iql": is_iql, "scoring_method": scoring_method, @@ -153,7 +153,7 @@ async def create( *, model: Literal["kanon-universal-classifier", "kanon-universal-classifier-mini"], query: str, - text: str, + texts: List[str], chunking_options: Optional[universal_create_params.ChunkingOptions] | NotGiven = NOT_GIVEN, is_iql: bool | NotGiven = NOT_GIVEN, scoring_method: Literal["auto", "chunk_max", "chunk_avg", "chunk_min"] | NotGiven = NOT_GIVEN, @@ -165,7 +165,7 @@ async def create( timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> UniversalClassification: """ - Classify the relevance of a legal document to a query with an Isaacus universal + Classify the relevance of legal documents to a query with an Isaacus universal legal AI classifier. Args: @@ -173,16 +173,16 @@ async def create( to use for universal classification. query: The [Isaacus Query Language (IQL)](https://docs.isaacus.com/iql) query or, if - IQL is disabled, the statement, to evaluate the text against. + IQL is disabled, the statement, to evaluate the texts against. The query must contain at least one non-whitespace character. - Unlike the text being classified, the query cannot be so long that it exceeds + Unlike the texts being classified, the query cannot be so long that it exceeds the maximum input length of the universal classifier. - text: The text to classify. + texts: The texts to classify. - The text must contain at least one non-whitespace character. + The texts must contain at least one non-whitespace character. chunking_options: Options for how to split text into smaller chunks. @@ -193,13 +193,13 @@ async def create( `auto` is the default scoring method and is recommended for most use cases. Currently, it is equivalent to `chunk_max`. In the future, it will automatically - select the best method based on the model and input. + select the best method based on the model and inputs. - `chunk_max` uses the highest confidence score of all of the text's chunks. + `chunk_max` uses the highest confidence score of all of the texts' chunks. - `chunk_avg` averages the confidence scores of all of the text's chunks. + `chunk_avg` averages the confidence scores of all of the texts' chunks. - `chunk_min` uses the lowest confidence score of all of the text's chunks. + `chunk_min` uses the lowest confidence score of all of the texts' chunks. 
extra_headers: Send extra headers @@ -215,7 +215,7 @@ async def create( { "model": model, "query": query, - "text": text, + "texts": texts, "chunking_options": chunking_options, "is_iql": is_iql, "scoring_method": scoring_method, diff --git a/src/isaacus/types/classifications/universal_classification.py b/src/isaacus/types/classifications/universal_classification.py index 0a7a8ef..ead7f39 100644 --- a/src/isaacus/types/classifications/universal_classification.py +++ b/src/isaacus/types/classifications/universal_classification.py @@ -4,15 +4,22 @@ from ..._models import BaseModel -__all__ = ["UniversalClassification", "Chunk", "Usage"] +__all__ = ["UniversalClassification", "Classification", "ClassificationChunk", "Usage"] -class Chunk(BaseModel): +class ClassificationChunk(BaseModel): end: int - """The end index of the chunk in the original text.""" + """ + The index of the character in the original text where the chunk ends, beginning + from `0` (such that, in Python, the chunk is equivalent to `text[start:end+1]`). + """ index: int - """The index of the chunk in the list of chunks.""" + """ + The original position of the chunk in the outputted list of chunks before + sorting, starting from `0` (and, therefore, ending at the number of chunks minus + `1`). + """ score: float """ @@ -24,19 +31,17 @@ class Chunk(BaseModel): """ start: int - """The start index of the chunk in the original text.""" + """ + The index of the character in the original text where the chunk starts, + beginning from `0`. + """ text: str """The text of the chunk.""" -class Usage(BaseModel): - input_tokens: int - """The number of tokens inputted to the model.""" - - -class UniversalClassification(BaseModel): - chunks: Optional[List[Chunk]] = None +class Classification(BaseModel): + chunks: Optional[List[ClassificationChunk]] = None """ The text as broken into chunks by [semchunk](https://github.com/isaacus-dev/semchunk), each chunk with its own @@ -45,6 +50,12 @@ class UniversalClassification(BaseModel): If no chunking occurred, this will be `null`. """ + index: int + """ + The index of the text in the input array of texts, starting from `0` (and, + therefore, ending at the number of texts minus `1`). + """ + score: float """ A score of the likelihood that the query expressed about the text is supported @@ -54,5 +65,18 @@ class UniversalClassification(BaseModel): score less than `0.5` indicates that the text does not support the query. """ + +class Usage(BaseModel): + input_tokens: int + """The number of tokens inputted to the model.""" + + +class UniversalClassification(BaseModel): + classifications: List[Classification] + """ + The classifications of the texts, by relevance to the query, in order from + highest to lowest relevance score. 
+ """ + usage: Usage """Statistics about the usage of resources in the process of classifying the text.""" diff --git a/src/isaacus/types/classifications/universal_create_params.py b/src/isaacus/types/classifications/universal_create_params.py index ae0e53c..e699ac8 100644 --- a/src/isaacus/types/classifications/universal_create_params.py +++ b/src/isaacus/types/classifications/universal_create_params.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Optional +from typing import List, Optional from typing_extensions import Literal, Required, TypedDict __all__ = ["UniversalCreateParams", "ChunkingOptions"] @@ -18,18 +18,18 @@ class UniversalCreateParams(TypedDict, total=False): query: Required[str] """ The [Isaacus Query Language (IQL)](https://docs.isaacus.com/iql) query or, if - IQL is disabled, the statement, to evaluate the text against. + IQL is disabled, the statement, to evaluate the texts against. The query must contain at least one non-whitespace character. - Unlike the text being classified, the query cannot be so long that it exceeds + Unlike the texts being classified, the query cannot be so long that it exceeds the maximum input length of the universal classifier. """ - text: Required[str] - """The text to classify. + texts: Required[List[str]] + """The texts to classify. - The text must contain at least one non-whitespace character. + The texts must contain at least one non-whitespace character. """ chunking_options: Optional[ChunkingOptions] @@ -46,13 +46,13 @@ class UniversalCreateParams(TypedDict, total=False): `auto` is the default scoring method and is recommended for most use cases. Currently, it is equivalent to `chunk_max`. In the future, it will automatically - select the best method based on the model and input. + select the best method based on the model and inputs. - `chunk_max` uses the highest confidence score of all of the text's chunks. + `chunk_max` uses the highest confidence score of all of the texts' chunks. - `chunk_avg` averages the confidence scores of all of the text's chunks. + `chunk_avg` averages the confidence scores of all of the texts' chunks. - `chunk_min` uses the lowest confidence score of all of the text's chunks. + `chunk_min` uses the lowest confidence score of all of the texts' chunks. 
""" diff --git a/tests/api_resources/classifications/test_universal.py b/tests/api_resources/classifications/test_universal.py index ff7fe4d..8ed4a0e 100644 --- a/tests/api_resources/classifications/test_universal.py +++ b/tests/api_resources/classifications/test_universal.py @@ -23,7 +23,7 @@ def test_method_create(self, client: Isaacus) -> None: universal = client.classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) assert_matches_type(UniversalClassification, universal, path=["response"]) @@ -33,7 +33,7 @@ def test_method_create_with_all_params(self, client: Isaacus) -> None: universal = client.classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], chunking_options={ "overlap_ratio": 0.1, "overlap_tokens": 0, @@ -50,7 +50,7 @@ def test_raw_response_create(self, client: Isaacus) -> None: response = client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) assert response.is_closed is True @@ -64,7 +64,7 @@ def test_streaming_response_create(self, client: Isaacus) -> None: with client.classifications.universal.with_streaming_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -84,7 +84,7 @@ async def test_method_create(self, async_client: AsyncIsaacus) -> None: universal = await async_client.classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) assert_matches_type(UniversalClassification, universal, path=["response"]) @@ -94,7 +94,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncIsaacus) - universal = await async_client.classifications.universal.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], chunking_options={ "overlap_ratio": 0.1, "overlap_tokens": 0, @@ -111,7 +111,7 @@ async def test_raw_response_create(self, async_client: AsyncIsaacus) -> None: response = await async_client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) assert response.is_closed is True @@ -125,7 +125,7 @@ async def test_streaming_response_create(self, async_client: AsyncIsaacus) -> No async with async_client.classifications.universal.with_streaming_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the 
document."], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/test_client.py b/tests/test_client.py index bd2060b..338d3e8 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -723,7 +723,7 @@ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> No dict( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ), UniversalCreateParams, ), @@ -748,7 +748,7 @@ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> Non dict( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ), UniversalCreateParams, ), @@ -788,7 +788,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: response = client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) assert response.retries_taken == failures_before_success @@ -816,7 +816,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: response = client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], extra_headers={"x-stainless-retry-count": Omit()}, ) @@ -844,7 +844,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: response = client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], extra_headers={"x-stainless-retry-count": "42"}, ) @@ -1533,7 +1533,7 @@ async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) dict( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ), UniversalCreateParams, ), @@ -1558,7 +1558,7 @@ async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) dict( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ), UniversalCreateParams, ), @@ -1599,7 +1599,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: response = await client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], ) assert response.retries_taken == failures_before_success @@ -1628,7 +1628,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: response = await client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell 
anyone about the document."], extra_headers={"x-stainless-retry-count": Omit()}, ) @@ -1657,7 +1657,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: response = await client.classifications.universal.with_raw_response.create( model="kanon-universal-classifier", query="This is a confidentiality clause.", - text="I agree not to tell anyone about the document.", + texts=["I agree not to tell anyone about the document."], extra_headers={"x-stainless-retry-count": "42"}, ) From 7237787d3d4971bf0b0fd6bf576c3d1c84473a51 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 03:44:40 +0000 Subject: [PATCH 6/7] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index d6a9380..43b9aae 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 2 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/isaacus%2Fisaacus-213d554b23f35e746460af23dd32bdde471230549ad223518c86d42ea917a180.yml openapi_spec_hash: 3672281fe031a42fc59e3a2af758a8f8 -config_hash: 1d603d50b7183a492ad6df5f728a1863 +config_hash: 1d15d860383a3f6da1ac388297687cc9 From ca3e139785ff256ab114f4be74ab6704e74abfd5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 03:47:18 +0000 Subject: [PATCH 7/7] release: 0.4.0 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 18 ++++++++++++++++++ pyproject.toml | 2 +- src/isaacus/_version.py | 2 +- 4 files changed, 21 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index b0cf8d0..da59f99 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.3.3" + ".": "0.4.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d21e30..4789767 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## 0.4.0 (2025-04-19) + +Full Changelog: [v0.3.3...v0.4.0](https://github.com/isaacus-dev/isaacus-python/compare/v0.3.3...v0.4.0) + +### ⚠ BREAKING CHANGES + +* **api:** made universal classification endpoint multi-input only + +### Features + +* **api:** made universal classification endpoint multi-input only ([4fb2535](https://github.com/isaacus-dev/isaacus-python/commit/4fb2535407d88d51c1db1e9a37c9ea767cdf06c0)) + + +### Chores + +* **internal:** bump pyright version ([2f992e7](https://github.com/isaacus-dev/isaacus-python/commit/2f992e788860d16739438a021bd8825a7999b1e4)) +* **internal:** update models test ([bb3df78](https://github.com/isaacus-dev/isaacus-python/commit/bb3df7823dd27e6482b5e97ef17019ee0a1e596c)) + ## 0.3.3 (2025-04-16) Full Changelog: [v0.3.2...v0.3.3](https://github.com/isaacus-dev/isaacus-python/compare/v0.3.2...v0.3.3) diff --git a/pyproject.toml b/pyproject.toml index 01f9e97..8b27e05 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "isaacus" -version = "0.3.3" +version = "0.4.0" description = "The official Python library for the isaacus API" dynamic = ["readme"] license = "Apache-2.0" diff --git a/src/isaacus/_version.py b/src/isaacus/_version.py index 10806d6..0d9b4ca 100644 --- a/src/isaacus/_version.py +++ b/src/isaacus/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
__title__ = "isaacus" -__version__ = "0.3.3" # x-release-please-version +__version__ = "0.4.0" # x-release-please-version
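
The breaking change in PATCH 5/7 replaces the single `text` parameter of `classifications.universal.create()` with a `texts` list and moves the per-text results under `response.classifications`. Below is a minimal migration sketch assembled from the README and model definitions shown in the diff above; it is a sketch, not part of the patch. The second input string and the credential setup are illustrative assumptions (the README configures `Isaacus()` with an `api_key` argument or its default source), while the parameter names, result fields (`classifications`, `index`, `score`, `usage.input_tokens`), and score semantics come directly from the diff.

```python
from isaacus import Isaacus

# Assumes credentials are configured as in the README (api_key argument or its default source).
client = Isaacus()

# Before 0.4.0 the endpoint took a single `text=` string and results were read
# from `universal_classification.chunks` / `.score`. From 0.4.0 it is
# multi-input only: pass `texts` as a list and read `classifications`,
# which are ordered from highest to lowest relevance score.
result = client.classifications.universal.create(
    model="kanon-universal-classifier",
    query="This is a confidentiality clause.",
    texts=[
        "I agree not to tell anyone about the document.",
        # Second input is illustrative only.
        "The parties may terminate this agreement with 30 days' notice.",
    ],
)

for classification in result.classifications:
    # `index` is the text's position in the input list; `score` is the
    # likelihood the query is supported by that text (values above 0.5
    # indicate support, per the docstrings in the diff).
    print(classification.index, classification.score)

print(result.usage.input_tokens)
```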