From f96932e1da7980281c73f59e3b1bbd1c8d7f9b93 Mon Sep 17 00:00:00 2001
From: SentienceDEV
Date: Thu, 22 Jan 2026 09:28:08 -0800
Subject: [PATCH 1/2] deep infra models

---
 sentience/llm_provider.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/sentience/llm_provider.py b/sentience/llm_provider.py
index 8939b8d..c8d797a 100644
--- a/sentience/llm_provider.py
+++ b/sentience/llm_provider.py
@@ -319,6 +319,26 @@ def model_name(self) -> str:
         return self._model_name
 
 
+class DeepInfraProvider(OpenAIProvider):
+    """
+    DeepInfra provider via OpenAI-compatible API.
+
+    Uses DeepInfra's OpenAI-compatible endpoint:
+    https://api.deepinfra.com/v1/openai
+
+    API token is read from DEEPINFRA_TOKEN if not provided.
+    """
+
+    def __init__(
+        self,
+        api_key: str | None = None,
+        model: str = "meta-llama/Meta-Llama-3-8B-Instruct",
+        base_url: str = "https://api.deepinfra.com/v1/openai",
+    ):
+        api_key = get_api_key_from_env(["DEEPINFRA_TOKEN"], api_key)
+        super().__init__(api_key=api_key, model=model, base_url=base_url)
+
+
 class AnthropicProvider(LLMProvider):
     """
     Anthropic provider implementation (Claude 3 Opus, Sonnet, Haiku, etc.)

From 188231c6625a88559f9be661b43cbfdb678fce6f Mon Sep 17 00:00:00 2001
From: SentienceDEV
Date: Thu, 22 Jan 2026 21:33:43 -0800
Subject: [PATCH 2/2] update to take api key or token

---
 sentience/__init__.py     | 2 ++
 sentience/llm_provider.py | 6 ++++--
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/sentience/__init__.py b/sentience/__init__.py
index e382fb8..a5e5b5e 100644
--- a/sentience/__init__.py
+++ b/sentience/__init__.py
@@ -64,6 +64,7 @@
 from .inspector import Inspector, inspect
 from .llm_provider import (
     AnthropicProvider,
+    DeepInfraProvider,
     LLMProvider,
     LLMResponse,
     LocalLLMProvider,
@@ -227,6 +228,7 @@
     "LLMResponse",
     "OpenAIProvider",
     "AnthropicProvider",
+    "DeepInfraProvider",
     "LocalLLMProvider",
     "LocalVisionLLMProvider",
     "MLXVLMProvider",

diff --git a/sentience/llm_provider.py b/sentience/llm_provider.py
index c8d797a..4874e47 100644
--- a/sentience/llm_provider.py
+++ b/sentience/llm_provider.py
@@ -326,7 +326,7 @@ class DeepInfraProvider(OpenAIProvider):
     Uses DeepInfra's OpenAI-compatible endpoint:
     https://api.deepinfra.com/v1/openai
 
-    API token is read from DEEPINFRA_TOKEN if not provided.
+    API token is read from DEEPINFRA_TOKEN or DEEPINFRA_API_KEY if not provided.
     """
 
     def __init__(
@@ -335,7 +335,9 @@ def __init__(
         model: str = "meta-llama/Meta-Llama-3-8B-Instruct",
         base_url: str = "https://api.deepinfra.com/v1/openai",
     ):
-        api_key = get_api_key_from_env(["DEEPINFRA_TOKEN"], api_key)
+        api_key = get_api_key_from_env(
+            ["DEEPINFRA_TOKEN", "DEEPINFRA_API_KEY"], api_key
+        )
         super().__init__(api_key=api_key, model=model, base_url=base_url)
 
 
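
Usage sketch (reviewer note, not part of the patches): a minimal example of how the new provider would be constructed once both patches are applied. The token value below is a placeholder, and the chat/completion methods inherited from OpenAIProvider are outside this diff and not shown.

    import os

    from sentience import DeepInfraProvider

    # Explicit key: passed through to the OpenAI-compatible client,
    # pointed at https://api.deepinfra.com/v1/openai by default.
    provider = DeepInfraProvider(api_key="YOUR_DEEPINFRA_TOKEN")

    # Environment-based key: with no api_key argument, get_api_key_from_env
    # falls back to DEEPINFRA_TOKEN or DEEPINFRA_API_KEY (per PATCH 2/2).
    os.environ["DEEPINFRA_API_KEY"] = "YOUR_DEEPINFRA_TOKEN"
    provider = DeepInfraProvider(model="meta-llama/Meta-Llama-3-8B-Instruct")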