Skip to content
1 change: 1 addition & 0 deletions core/control-plane/schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ const modelDescriptionSchema = z.object({
"ovhcloud",
"nebius",
"siliconflow",
"avian",
"tensorix",
"scaleway",
"watsonx",
Expand Down
3 changes: 3 additions & 0 deletions core/llm/autodetect.ts
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ const PROVIDER_HANDLES_TEMPLATING: string[] = [
"docker",
"nous",
"zAI",
"avian",
"tensorix",
// TODO add these, change to inverted logic so only the ones that need templating are hardcoded
// Asksage.ts
Expand Down Expand Up @@ -134,6 +135,7 @@ const PROVIDER_SUPPORTS_IMAGES: string[] = [
"ovhcloud",
"watsonx",
"zAI",
"avian",
"tensorix",
];

Expand Down Expand Up @@ -253,6 +255,7 @@ const PARALLEL_PROVIDERS: string[] = [
"vertexai",
"function-network",
"scaleway",
"avian",
"minimax",
"tensorix",
];
Expand Down
13 changes: 13 additions & 0 deletions core/llm/llms/Avian.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import { LLMOptions } from "../../index.js";

import OpenAI from "./OpenAI.js";

/**
 * Provider class for Avian (https://avian.io).
 *
 * Avian exposes an OpenAI-compatible REST API, so this class only supplies
 * the provider name and default options; all request/response handling is
 * inherited unchanged from the OpenAI base class.
 */
class Avian extends OpenAI {
  // Identifier used in config files and provider registries to select Avian.
  static providerName = "avian";
  static defaultOptions: Partial<LLMOptions> = {
    apiBase: "https://api.avian.io/v1/",
    // Use the modern chat-completions endpoint, not the legacy /completions one.
    useLegacyCompletionsEndpoint: false,
  };
}

export default Avian;
2 changes: 2 additions & 0 deletions core/llm/llms/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { renderTemplatedString } from "../../util/handlebars/renderTemplatedStri
import { BaseLLM } from "../index";
import Anthropic from "./Anthropic";
import Asksage from "./Asksage";
import Avian from "./Avian";
import Azure from "./Azure";
import Bedrock from "./Bedrock";
import BedrockImport from "./BedrockImport";
Expand Down Expand Up @@ -134,6 +135,7 @@ export const LLMClasses = [
LlamaStack,
TARS,
zAI,
Avian,
];

export async function llmFromDescription(
Expand Down
9 changes: 9 additions & 0 deletions core/llm/toolSupport.ts
Original file line number Diff line number Diff line change
Expand Up @@ -433,6 +433,15 @@ export const PROVIDER_TOOL_SUPPORT: Record<string, (model: string) => boolean> =
const lower = model.toLowerCase();
return !!lower.match(/^glm-[4-9]/);
},
avian: (model) => {
  // Tool calling on Avian is gated by model family; match the family name
  // anywhere in the (case-insensitive) model id.
  const id = model.toLowerCase();
  return ["deepseek", "glm", "kimi", "minimax"].some((family) =>
    id.includes(family),
  );
},
moonshot: (model) => {
// support moonshot models
// https://platform.moonshot.ai/docs/pricing/chat#concepts
Expand Down
61 changes: 61 additions & 0 deletions docs/customize/model-providers/more/avian.mdx
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
---
title: "Avian"
description: "Configure Avian's AI models with Continue, including DeepSeek V3.2, Kimi K2.5, GLM-5, and MiniMax M2.5"
---

[Avian](https://avian.io/) provides an OpenAI-compatible API with access to leading AI models at competitive pricing.

<Info>Get an API key from the [Avian dashboard](https://avian.io)</Info>

## Configuration

<Tabs>
<Tab title="YAML">
```yaml title="config.yaml"
name: My Config
version: 0.0.1
schema: v1

models:
- name: DeepSeek V3.2
provider: avian
model: deepseek/deepseek-v3.2
apiKey: <YOUR_AVIAN_API_KEY>
```
</Tab>
<Tab title="JSON (Deprecated)">
```json title="config.json"
{
"models": [
{
"title": "DeepSeek V3.2",
"provider": "avian",
"model": "deepseek/deepseek-v3.2",
"apiKey": "<YOUR_AVIAN_API_KEY>"
}
]
}
```
</Tab>
</Tabs>

## Available Models

| Model | Context Length | Input Price | Output Price |
| ----- | ------------- | ----------- | ------------ |
| `deepseek/deepseek-v3.2` | 164K | $0.26/M | $0.38/M |
| `moonshotai/kimi-k2.5` | 131K | $0.45/M | $2.20/M |
| `z-ai/glm-5` | 131K | $0.30/M | $2.55/M |
| `minimax/minimax-m2.5` | 1M | $0.30/M | $1.10/M |

## Configuration Options

| Option | Description | Default |
| --------- | -------------------- | ----------------------------- |
| `apiKey` | Avian API key | Required |
| `apiBase` | API base URL | `https://api.avian.io/v1/` |
| `model` | Model name to use | - |

<Tip>
You can set the `AVIAN_API_KEY` environment variable instead of specifying the API key directly in the configuration file.
</Tip>
1 change: 1 addition & 0 deletions docs/docs.json
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,7 @@
"group": "More Providers",
"pages": [
"customize/model-providers/more/asksage",
"customize/model-providers/more/avian",
"customize/model-providers/more/clawrouter",
"customize/model-providers/more/deepseek",
"customize/model-providers/more/deepinfra",
Expand Down
9 changes: 6 additions & 3 deletions extensions/vscode/config_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,8 @@
"ovhcloud",
"venice",
"inception",
"tars"
"tars",
"avian"
],
"markdownEnumDescriptions": [
"### OpenAI\nUse gpt-4, gpt-3.5-turbo, or any other OpenAI model. See [here](https://openai.com/product#made-for-developers) to obtain an API key.\n\n> [Reference](https://docs.continue.dev/reference/Model%20Providers/openai)",
Expand Down Expand Up @@ -289,7 +290,8 @@
"### OVHcloud AI Endpoints is a serverless inference API that provides access to a curated selection of models (e.g., Llama, Mistral, Qwen, Deepseek). It is designed with security and data privacy in mind and is compliant with GDPR. To get started, create an API key on the OVHcloud [AI Endpoints website](https://endpoints.ai.cloud.ovh.net/). For more information, including pricing, visit the OVHcloud [AI Endpoints product page](https://www.ovhcloud.com/en/public-cloud/ai-endpoints/).",
"### Venice\n Venice.AI is a privacy-focused generative AI platform, allowing users to interact with open-source LLMs without storing any private user data.\nHosted models support the OpenAI API standard, providing seamless integration for users seeking privacy and flexibility.\nTo get started with the Venice API, either purchase a pro account, stake $VVV for daily inference allotments, or fund your account with USD.\nVisit the [API settings page](https://venice.ai/settings/api) or learn more at the [Venice API documentation](https://venice.ai/api).",
"### Inception\n Inception Labs offer a new generation of diffusion-based LLMs.\nVisit the [API settings page](https://platform.inceptionlabs.ai/) or learn more at the [Inception docs](https://platform.inceptionlabs.ai/docs).",
"### TARS\nTARS is an OpenAI-compatible proxy router. To get started, obtain an API key and configure the provider in your config.json."
"### TARS\nTARS is an OpenAI-compatible proxy router. To get started, obtain an API key and configure the provider in your config.json.",
"### Avian\nAvian provides an OpenAI-compatible API with access to leading AI models including DeepSeek, Kimi, GLM, and MiniMax. To get started, obtain an API key from [avian.io](https://avian.io)."
],
"type": "string"
},
Expand Down Expand Up @@ -536,7 +538,8 @@
"kindo",
"scaleway",
"ovhcloud",
"venice"
"venice",
"avian"
]
}
},
Expand Down
Binary file added gui/public/logos/avian.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
53 changes: 53 additions & 0 deletions gui/src/pages/AddNewModel/configs/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -563,6 +563,59 @@ export const models: { [key: string]: ModelPackage } = {
providerOptions: ["zAI"],
isOpenSource: false,
},
// --- Models hosted by Avian (avian.io) ---------------------------------
avianDeepseekV32: {
  title: "DeepSeek V3.2",
  description: "DeepSeek V3.2 with 164K context, available through Avian",
  refUrl: "https://avian.io",
  params: {
    title: "DeepSeek V3.2",
    // Avian uses fully-qualified "<vendor>/<model>" ids.
    model: "deepseek/deepseek-v3.2",
    contextLength: 164_000,
  },
  icon: "avian.png",
  providerOptions: ["avian"],
  isOpenSource: false,
},
avianKimiK25: {
  title: "Kimi K2.5",
  description:
    "Moonshot AI's Kimi K2.5 with 131K context, available through Avian",
  refUrl: "https://avian.io",
  params: {
    title: "Kimi K2.5",
    model: "moonshotai/kimi-k2.5",
    contextLength: 131_000,
  },
  icon: "avian.png",
  providerOptions: ["avian"],
  isOpenSource: false,
},
avianGlm5: {
  title: "GLM-5",
  description: "Z.ai's GLM-5 with 131K context, available through Avian",
  refUrl: "https://avian.io",
  params: {
    title: "GLM-5",
    model: "z-ai/glm-5",
    contextLength: 131_000,
  },
  icon: "avian.png",
  providerOptions: ["avian"],
  isOpenSource: false,
},
avianMinimaxM25: {
  title: "MiniMax M2.5",
  description: "MiniMax M2.5 with 1M context window, available through Avian",
  refUrl: "https://avian.io",
  params: {
    title: "MiniMax M2.5",
    model: "minimax/minimax-m2.5",
    contextLength: 1_000_000,
  },
  icon: "avian.png",
  providerOptions: ["avian"],
  isOpenSource: false,
},
mistralOs: {
title: "Mistral",
description:
Expand Down
26 changes: 26 additions & 0 deletions gui/src/pages/AddNewModel/configs/providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -266,6 +266,32 @@ export const providers: Partial<Record<string, ProviderInfo>> = {
],
apiKeyUrl: "https://z.ai/manage-apikey/apikey-list",
},
// Avian (avian.io): OpenAI-compatible hosted provider; requires an API key.
avian: {
  title: "Avian",
  provider: "avian",
  description: "Access top AI models at low cost through Avian's API",
  longDescription:
    "Avian provides an OpenAI-compatible API with access to leading AI models including DeepSeek V3.2, Kimi K2.5, GLM-5, and MiniMax M2.5. Get your API key from the [Avian dashboard](https://avian.io).",
  icon: "avian.png",
  tags: [ModelProviderTags.RequiresApiKey],
  // Model packages offered in the "Add New Model" UI for this provider.
  packages: [
    models.avianDeepseekV32,
    models.avianKimiK25,
    models.avianGlm5,
    models.avianMinimaxM25,
  ],
  collectInputFor: [
    // API key is the only provider-specific input; the spread appends the
    // completion-parameter inputs shared across providers.
    {
      inputType: "text",
      key: "apiKey",
      label: "API Key",
      placeholder: "Enter your Avian API key",
      required: true,
    },
    ...completionParamsInputsConfigs,
  ],
  apiKeyUrl: "https://avian.io",
},
"function-network": {
title: "Function Network",
provider: "function-network",
Expand Down
1 change: 1 addition & 0 deletions packages/config-types/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ export const modelDescriptionSchema = z.object({
"scaleway",
"watsonx",
"minimax",
"avian",
]),
model: z.string(),
apiKey: z.string().optional(),
Expand Down
2 changes: 2 additions & 0 deletions packages/llm-info/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { Anthropic } from "./providers/anthropic.js";
import { Avian } from "./providers/avian.js";
import { Azure } from "./providers/azure.js";
import { Bedrock } from "./providers/bedrock.js";
import { Cohere } from "./providers/cohere.js";
Expand Down Expand Up @@ -29,6 +30,7 @@ export const allModelProviders: ModelProvider[] = [
MiniMax,
xAI,
zAI,
Avian,
];

export const allLlms: LlmInfoWithProvider[] = allModelProviders.flatMap(
Expand Down
36 changes: 36 additions & 0 deletions packages/llm-info/src/providers/avian.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import { ModelProvider } from "../types.js";

/**
 * Model metadata for Avian (https://avian.io), an OpenAI-compatible provider.
 * Each regex matches the fully-qualified "<vendor>/<model>" id that the
 * Avian API expects; context lengths match the provider's published figures.
 */
export const Avian: ModelProvider = {
  models: [
    {
      model: "deepseek/deepseek-v3.2",
      displayName: "DeepSeek V3.2",
      contextLength: 164000,
      recommendedFor: ["chat"],
      regex: /deepseek\/deepseek-v3\.2/,
    },
    {
      model: "moonshotai/kimi-k2.5",
      displayName: "Kimi K2.5",
      contextLength: 131000,
      recommendedFor: ["chat"],
      regex: /moonshotai\/kimi-k2\.5/,
    },
    {
      model: "z-ai/glm-5",
      displayName: "GLM-5",
      contextLength: 131000,
      recommendedFor: ["chat"],
      regex: /z-ai\/glm-5/,
    },
    {
      model: "minimax/minimax-m2.5",
      displayName: "MiniMax M2.5",
      contextLength: 1000000,
      recommendedFor: ["chat"],
      regex: /minimax\/minimax-m2\.5/,
    },
  ],
  id: "avian",
  displayName: "Avian",
};
2 changes: 2 additions & 0 deletions packages/openai-adapters/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,8 @@ export function constructLlmApi(config: LLMConfig): BaseLlmApi | undefined {
return openAICompatible("https://api.x.ai/v1/", config);
case "zAI":
return openAICompatible("https://api.z.ai/api/paas/v4/", config);
case "avian":
return openAICompatible("https://api.avian.io/v1/", config);
case "voyage":
return openAICompatible("https://api.voyageai.com/v1/", config);
case "mistral":
Expand Down
1 change: 1 addition & 0 deletions packages/openai-adapters/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ export const OpenAIConfigSchema = BasePlusConfig.extend({
z.literal("vllm"),
z.literal("xAI"),
z.literal("zAI"),
z.literal("avian"),
z.literal("scaleway"),
z.literal("tensorix"),
z.literal("ncompass"),
Expand Down
Loading