Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions apisix/plugins/ai-drivers/openrouter.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
--
-- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements. See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--

local openai_base = require("apisix.plugins.ai-drivers.openai-base")

-- OpenRouter exposes an OpenAI-compatible chat-completions API, so this
-- driver simply reuses the shared openai-base driver, pointed at the
-- default OpenRouter endpoint (https://openrouter.ai/api/v1/chat/completions).
return openai_base.new({
    host = "openrouter.ai",
    port = 443,
    path = "/api/v1/chat/completions",
})
36 changes: 24 additions & 12 deletions apisix/plugins/ai-drivers/schema.lua
Original file line number Diff line number Diff line change
Expand Up @@ -41,21 +41,33 @@ local openai_compatible_chat_schema = {
required = {"messages"}
}

_M.chat_request_schema = {
["openai"] = openai_compatible_chat_schema,
["deepseek"] = openai_compatible_chat_schema,
["openai-compatible"] = openai_compatible_chat_schema,
["azure-openai"] = openai_compatible_chat_schema
local openai_compatible_list = {
"openai",
"deepseek",
"aimlapi",
"openai-compatible",
"azure-openai",
"openrouter",
}

function _M.is_openai_compatible_provider(provider)
if provider == "openai" or
provider == "deepseek" or
provider == "openai-compatible" or
provider == "azure-openai" then
return true
-- Export the list of all providers.
-- Currently every provider is OpenAI-compatible.
-- If providers incompatible with the OpenAI API are ever added,
-- merge the lists and keep exporting the combined list from this variable.
_M.providers = openai_compatible_list

_M.chat_request_schema = {}

do
local openai_compatible_kv = {}
for _, provider in ipairs(openai_compatible_list) do
_M.chat_request_schema[provider] = openai_compatible_chat_schema
openai_compatible_kv[provider] = true
end

function _M.is_openai_compatible_provider(provider)
return openai_compatible_kv[provider] == true
end
return false
end

return _M
18 changes: 3 additions & 15 deletions apisix/plugins/ai-proxy/schema.lua
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
-- limitations under the License.
--
local schema_def = require("apisix.schema_def")
local ai_drivers_schema = require("apisix.plugins.ai-drivers.schema")

local _M = {}

Expand Down Expand Up @@ -63,13 +64,7 @@ local ai_instance_schema = {
provider = {
type = "string",
description = "Type of the AI service instance.",
enum = {
"openai",
"deepseek",
"aimlapi",
"openai-compatible",
"azure-openai"
}, -- add more providers later
enum = ai_drivers_schema.providers,
},
priority = {
type = "integer",
Expand Down Expand Up @@ -125,14 +120,7 @@ _M.ai_proxy_schema = {
provider = {
type = "string",
description = "Type of the AI service instance.",
enum = {
"openai",
"deepseek",
"aimlapi",
"openai-compatible",
"azure-openai"
}, -- add more providers later

enum = ai_drivers_schema.providers,
},
logging = logging_schema,
auth = auth_schema,
Expand Down
8 changes: 2 additions & 6 deletions apisix/plugins/ai-request-rewrite.lua
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
-- limitations under the License.
--
local core = require("apisix.core")
local ai_drivers_schema = require("apisix.plugins.ai-drivers.schema")
local require = require
local pcall = pcall
local ngx = ngx
Expand Down Expand Up @@ -63,12 +64,7 @@ local schema = {
provider = {
type = "string",
description = "Name of the AI service provider.",
enum = {
"openai",
"openai-compatible",
"deepseek",
"aimlapi"
} -- add more providers later
enum = ai_drivers_schema.providers,
},
auth = auth_schema,
options = model_options_schema,
Expand Down
6 changes: 3 additions & 3 deletions docs/en/latest/plugins/ai-proxy-multi.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ keywords:
- ai-proxy-multi
- AI
- LLM
description: The ai-proxy-multi Plugin extends the capabilities of ai-proxy with load balancing, retries, fallbacks, and health chekcs, simplifying the integration with OpenAI, DeepSeek, Azure, AIMLAPI, and other OpenAI-compatible APIs.
description: The ai-proxy-multi Plugin extends the capabilities of ai-proxy with load balancing, retries, fallbacks, and health checks, simplifying the integration with OpenAI, DeepSeek, Azure, AIMLAPI, OpenRouter, and other OpenAI-compatible APIs.
---

<!--
Expand Down Expand Up @@ -35,7 +35,7 @@ description: The ai-proxy-multi Plugin extends the capabilities of ai-proxy with

## Description

The `ai-proxy-multi` Plugin simplifies access to LLM and embedding models by transforming Plugin configurations into the designated request format for OpenAI, DeepSeek, Azure, AIMLAPI, and other OpenAI-compatible APIs. It extends the capabilities of [`ai-proxy`](./ai-proxy.md) with load balancing, retries, fallbacks, and health checks.
The `ai-proxy-multi` Plugin simplifies access to LLM and embedding models by transforming Plugin configurations into the designated request format for OpenAI, DeepSeek, Azure, AIMLAPI, OpenRouter, and other OpenAI-compatible APIs. It extends the capabilities of [`ai-proxy`](./ai-proxy.md) with load balancing, retries, fallbacks, and health checks.

In addition, the Plugin also supports logging LLM request information in the access log, such as token usage, model, time to the first response, and more.

Expand All @@ -58,7 +58,7 @@ In addition, the Plugin also supports logging LLM request information in the acc
| balancer.key | string | False | | | Used when `type` is `chash`. When `hash_on` is set to `header` or `cookie`, `key` is required. When `hash_on` is set to `consumer`, `key` is not required as the consumer name will be used as the key automatically. |
| instances | array[object] | True | | | LLM instance configurations. |
| instances.name | string | True | | | Name of the LLM service instance. |
| instances.provider | string | True | | [openai, deepseek, azure-openai, aimlapi, openai-compatible] | LLM service provider. When set to `openai`, the Plugin will proxy the request to `api.openai.com`. When set to `deepseek`, the Plugin will proxy the request to `api.deepseek.com`. When set to `aimlapi`, the Plugin uses the OpenAI-compatible driver and proxies the request to `api.aimlapi.com` by default. When set to `openai-compatible`, the Plugin will proxy the request to the custom endpoint configured in `override`. |
| instances.provider | string | True | | [openai, deepseek, azure-openai, aimlapi, openrouter, openai-compatible] | LLM service provider. When set to `openai`, the Plugin will proxy the request to `api.openai.com`. When set to `deepseek`, the Plugin will proxy the request to `api.deepseek.com`. When set to `aimlapi`, the Plugin uses the OpenAI-compatible driver and proxies the request to `api.aimlapi.com` by default. When set to `openrouter`, the Plugin uses the OpenAI-compatible driver and proxies the request to `openrouter.ai` by default. When set to `openai-compatible`, the Plugin will proxy the request to the custom endpoint configured in `override`. |
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The azure-openai PR didn't add description for the option - consider adding it here? I think it does something like this

When set to azure-openai, the plugin proxies requests to the custom endpoint configured in override and additionally removes the model parameter from user requests.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should be handled in a separate PR.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(when I have time. Or I will create a issue.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ok

| instances.priority | integer | False | 0 | | Priority of the LLM instance in load balancing. `priority` takes precedence over `weight`. |
| instances.weight | string | True | 0 | greater or equal to 0 | Weight of the LLM instance in load balancing. |
| instances.auth | object | True | | | Authentication configurations. |
Expand Down
6 changes: 3 additions & 3 deletions docs/en/latest/plugins/ai-proxy.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ keywords:
- ai-proxy
- AI
- LLM
description: The ai-proxy Plugin simplifies access to LLM and embedding models providers by converting Plugin configurations into the required request format for OpenAI, DeepSeek, Azure, AIMLAPI, and other OpenAI-compatible APIs.
description: The ai-proxy Plugin simplifies access to LLM and embedding models providers by converting Plugin configurations into the required request format for OpenAI, DeepSeek, Azure, AIMLAPI, OpenRouter, and other OpenAI-compatible APIs.
---

<!--
Expand Down Expand Up @@ -35,7 +35,7 @@ description: The ai-proxy Plugin simplifies access to LLM and embedding models p

## Description

The `ai-proxy` Plugin simplifies access to LLM and embedding models by transforming Plugin configurations into the designated request format. It supports the integration with OpenAI, DeepSeek, Azure, AIMLAPI, and other OpenAI-compatible APIs.
The `ai-proxy` Plugin simplifies access to LLM and embedding models by transforming Plugin configurations into the designated request format. It supports the integration with OpenAI, DeepSeek, Azure, AIMLAPI, OpenRouter, and other OpenAI-compatible APIs.

In addition, the Plugin also supports logging LLM request information in the access log, such as token usage, model, time to the first response, and more.

Expand All @@ -51,7 +51,7 @@ In addition, the Plugin also supports logging LLM request information in the acc

| Name | Type | Required | Default | Valid values | Description |
|--------------------|--------|----------|---------|------------------------------------------|-------------|
| provider | string | True | | [openai, deepseek, azure-openai, aimlapi, openai-compatible] | LLM service provider. When set to `openai`, the Plugin will proxy the request to `https://api.openai.com/chat/completions`. When set to `deepseek`, the Plugin will proxy the request to `https://api.deepseek.com/chat/completions`. When set to `aimlapi`, the Plugin uses the OpenAI-compatible driver and proxies the request to `https://api.aimlapi.com/v1/chat/completions` by default. When set to `openai-compatible`, the Plugin will proxy the request to the custom endpoint configured in `override`. |
| provider | string | True | | [openai, deepseek, azure-openai, aimlapi, openrouter, openai-compatible] | LLM service provider. When set to `openai`, the Plugin will proxy the request to `https://api.openai.com/chat/completions`. When set to `deepseek`, the Plugin will proxy the request to `https://api.deepseek.com/chat/completions`. When set to `aimlapi`, the Plugin uses the OpenAI-compatible driver and proxies the request to `https://api.aimlapi.com/v1/chat/completions` by default. When set to `openrouter`, the Plugin uses the OpenAI-compatible driver and proxies the request to `https://openrouter.ai/api/v1/chat/completions` by default. When set to `openai-compatible`, the Plugin will proxy the request to the custom endpoint configured in `override`. |
| auth | object | True | | | Authentication configurations. |
| auth.header | object | False | | | Authentication headers. At least one of `header` or `query` must be configured. |
| auth.query | object | False | | | Authentication query parameters. At least one of `header` or `query` must be configured. |
Expand Down
2 changes: 1 addition & 1 deletion docs/en/latest/plugins/ai-request-rewrite.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ The `ai-request-rewrite` plugin intercepts client requests before they are forwa
| **Field** | **Required** | **Type** | **Description** |
| ------------------------- | ------------ | -------- | ------------------------------------------------------------------------------------ |
| prompt | Yes | String | The prompt send to LLM service. |
| provider | Yes | String | Name of the LLM service. Available options: openai, deekseek, azure-openai, aimlapi and openai-compatible. When `aimlapi` is selected, the plugin uses the OpenAI-compatible driver with a default endpoint of `https://api.aimlapi.com/v1/chat/completions`. |
| provider | Yes | String | Name of the LLM service. Available options: openai, deepseek, azure-openai, aimlapi, openrouter and openai-compatible. When `aimlapi` is selected, the plugin uses the OpenAI-compatible driver with a default endpoint of `https://api.aimlapi.com/v1/chat/completions`. |
Copy link

Copilot AI Jan 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There is a typo in "deekseek" - it should be "deepseek" for consistency with the actual provider name used throughout the codebase.

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should be fixed in another pr.

Copy link
Copy Markdown
Member

@kayx23 kayx23 Jan 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The provider description for ai-request-rewrite is not great - it only explained aimlapi. This should be noted and improved (maybe in a different PR?) cc: @Yilialinn

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yes.

| auth | Yes | Object | Authentication configuration |
| auth.header | No | Object | Authentication headers. Key must match pattern `^[a-zA-Z0-9._-]+$`. |
| auth.query | No | Object | Authentication query parameters. Key must match pattern `^[a-zA-Z0-9._-]+$`. |
Expand Down
6 changes: 3 additions & 3 deletions docs/zh/latest/plugins/ai-proxy-multi.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ keywords:
- ai-proxy-multi
- AI
- LLM
description: ai-proxy-multi 插件通过负载均衡、重试、故障转移和健康检查扩展了 ai-proxy 的功能,简化了与 OpenAI、DeepSeek、Azure、AIMLAPI 和其他 OpenAI 兼容 API 的集成。
description: ai-proxy-multi 插件通过负载均衡、重试、故障转移和健康检查扩展了 ai-proxy 的功能,简化了与 OpenAI、DeepSeek、Azure、AIMLAPI、OpenRouter 和其他 OpenAI 兼容 API 的集成。
---

<!--
Expand Down Expand Up @@ -35,7 +35,7 @@ description: ai-proxy-multi 插件通过负载均衡、重试、故障转移和

## 描述

`ai-proxy-multi` 插件通过将插件配置转换为 OpenAI、DeepSeek、Azure、AIMLAPI 和其他 OpenAI 兼容 API 的指定请求格式,简化了对 LLM 和嵌入模型的访问。它通过负载均衡、重试、故障转移和健康检查扩展了 [`ai-proxy`](./ai-proxy.md) 的功能。
`ai-proxy-multi` 插件通过将插件配置转换为 OpenAI、DeepSeek、Azure、AIMLAPI、OpenRouter 和其他 OpenAI 兼容 API 的指定请求格式,简化了对 LLM 和嵌入模型的访问。它通过负载均衡、重试、故障转移和健康检查扩展了 [`ai-proxy`](./ai-proxy.md) 的功能。

此外,该插件还支持在访问日志中记录 LLM 请求信息,如令牌使用量、模型、首次响应时间等。

Expand All @@ -58,7 +58,7 @@ description: ai-proxy-multi 插件通过负载均衡、重试、故障转移和
| balancer.key | string | 否 | | | 当 `type` 为 `chash` 时使用。当 `hash_on` 设置为 `header` 或 `cookie` 时,需要 `key`。当 `hash_on` 设置为 `consumer` 时,不需要 `key`,因为消费者名称将自动用作键。 |
| instances | array[object] | 是 | | | LLM 实例配置。 |
| instances.name | string | 是 | | | LLM 服务实例的名称。 |
| instances.provider | string | 是 | | [openai, deepseek, azure-openai, aimlapi, openai-compatible] | LLM 服务提供商。设置为 `openai` 时,插件将代理请求到 `api.openai.com`。设置为 `deepseek` 时,插件将代理请求到 `api.deepseek.com`。设置为 `aimlapi` 时,插件使用 OpenAI 兼容驱动程序,默认将请求代理到 `api.aimlapi.com`。设置为 `openai-compatible` 时,插件将代理请求到在 `override` 中配置的自定义端点。 |
| instances.provider | string | 是 | | [openai, deepseek, azure-openai, aimlapi, openrouter, openai-compatible] | LLM 服务提供商。设置为 `openai` 时,插件将代理请求到 `api.openai.com`。设置为 `deepseek` 时,插件将代理请求到 `api.deepseek.com`。设置为 `aimlapi` 时,插件使用 OpenAI 兼容驱动程序,默认将请求代理到 `api.aimlapi.com`。设置为 `openrouter` 时,插件使用 OpenAI 兼容驱动程序,默认将请求代理到 `openrouter.ai`。设置为 `openai-compatible` 时,插件将代理请求到在 `override` 中配置的自定义端点。 |
| instances.priority | integer | 否 | 0 | | LLM 实例在负载均衡中的优先级。`priority` 优先于 `weight`。 |
| instances.weight | string | 是 | 0 | 大于或等于 0 | LLM 实例在负载均衡中的权重。 |
| instances.auth | object | 是 | | | 身份验证配置。 |
Expand Down
Loading
Loading