# Providers

llm-connector supports 12+ LLM providers with a unified interface.

All factory methods require both `api_key` and `base_url` — there are no hidden defaults. Use `LlmClient::builder()` if you need timeout or proxy configuration.

Provider Overview

| Provider | Constructor | API Format |
|----------|-------------|------------|
| OpenAI | `LlmClient::openai(key, base_url)` | Native |
| Azure OpenAI | `LlmClient::azure_openai(key, endpoint, api_version)` | Native |
| Anthropic Claude | `LlmClient::anthropic(key, base_url)` | Native |
| Google Gemini | `LlmClient::google(key, base_url)` | Native |
| Aliyun DashScope | `LlmClient::aliyun(key, base_url)` | Native |
| Zhipu GLM | `LlmClient::zhipu(key, base_url)` | Native |
| Zhipu (OpenAI compat) | `LlmClient::zhipu_openai_compatible(key, base_url)` | OpenAI |
| Tencent Hunyuan | `LlmClient::tencent(id, key, base_url)` | Native V3 |
| Volcengine | `LlmClient::volcengine(key, base_url)` | OpenAI |
| DeepSeek | `LlmClient::deepseek(key, base_url)` | OpenAI |
| Moonshot (Kimi) | `LlmClient::moonshot(key, base_url)` | OpenAI |
| Xiaomi MiMo | `LlmClient::xiaomi(key, base_url)` | OpenAI |
| Ollama | `LlmClient::ollama(base_url)` | Native |
| LongCat | `LlmClient::longcat_anthropic(key, base_url)` | Anthropic |
| Any OpenAI-compat | `LlmClient::openai_compatible(key, base_url, name)` | OpenAI |

OpenAI

rust
use llm_connector::LlmClient;

let client = LlmClient::openai("sk-...", "https://api.openai.com/v1")?;

Azure OpenAI

rust
let client = LlmClient::azure_openai(
    "your-api-key",
    "https://your-resource.openai.azure.com",
    "2024-02-15-preview"
)?;

Anthropic Claude

rust
use llm_connector::LlmClient;

let client = LlmClient::anthropic("sk-ant-...", "https://api.anthropic.com")?;

AWS Bedrock / Google Vertex AI

rust
// AWS Bedrock
let client = LlmClient::anthropic_bedrock("us-east-1", "access_key", "secret_key")?;

// Google Vertex AI
let client = LlmClient::anthropic_vertex("project-id", "us-central1", "access-token")?;

Aliyun DashScope (Qwen)

rust
use llm_connector::LlmClient;

let client = LlmClient::aliyun("sk-...", "https://dashscope.aliyuncs.com")?;

Variants

rust
// International region
let client = LlmClient::aliyun_international("sk-...", "us-east-1")?;

// Private cloud
let client = LlmClient::aliyun_private("sk-...", "https://your-private.aliyun.com")?;

Zhipu GLM

rust
use llm_connector::LlmClient;

// Native SDK style
let client = LlmClient::zhipu("your-api-key", "https://open.bigmodel.cn")?;

// OpenAI Compatible Mode
let client = LlmClient::zhipu_openai_compatible("your-api-key", "https://open.bigmodel.cn")?;

// Enterprise endpoint
let client = LlmClient::zhipu_enterprise("your-api-key", "https://your-enterprise.bigmodel.cn")?;

Google Gemini

rust
use llm_connector::LlmClient;

let client = LlmClient::google(
    "your-api-key",
    "https://generativelanguage.googleapis.com/v1beta"
)?;

Tencent Hunyuan

Native Tencent Cloud API v3 (TC3-HMAC-SHA256). Requires the tencent feature:

```toml
llm-connector = { version = "1.0.3", features = ["tencent"] }
```

```rust
use llm_connector::LlmClient;

let client = LlmClient::tencent(
    "AKID...",
    "SecretKey...",
    "https://hunyuan.tencentcloudapi.com"
)?;
```

Volcengine

rust
use llm_connector::LlmClient;

let client = LlmClient::volcengine(
    "your-api-key",
    "https://ark.cn-beijing.volces.com/api/v3"
)?;

DeepSeek

rust
use llm_connector::LlmClient;

let client = LlmClient::deepseek("sk-...", "https://api.deepseek.com")?;

Moonshot (Kimi)

rust
use llm_connector::LlmClient;

let client = LlmClient::moonshot("sk-...", "https://api.moonshot.cn/v1")?;

Xiaomi MiMo

rust
use llm_connector::LlmClient;

let client = LlmClient::xiaomi("your-api-key", "https://api.xiaomimimo.com/v1")?;

Ollama

rust
use llm_connector::LlmClient;

let client = LlmClient::ollama("http://localhost:11434")?;

LongCat

LongCat supports both OpenAI and Anthropic wire formats:

rust
use llm_connector::LlmClient;

// Anthropic format (Bearer auth)
let client = LlmClient::longcat_anthropic("ak_...", "https://api.longcat.chat/anthropic")?;

// OpenAI format
let client = LlmClient::openai_compatible("ak_...", "https://api.longcat.chat/openai", "longcat")?;

Generic / Custom OpenAI-Compatible

rust
use llm_connector::LlmClient;

let client = LlmClient::openai_compatible(
    "api-key",
    "https://api.example.com/v1",
    "my-provider"
)?;

Builder Pattern (Timeout / Proxy)

rust
use llm_connector::LlmClient;

let client = LlmClient::builder()
    .openai("sk-...")
    .base_url("https://api.openai.com/v1")
    .timeout(120)
    .proxy("http://proxy.example.com:8080")
    .build()?;

Environment Variables

| Provider | Environment Variable |
|----------|----------------------|
| OpenAI | `OPENAI_API_KEY` |
| Anthropic | `ANTHROPIC_API_KEY` |
| Aliyun | `DASHSCOPE_API_KEY` |
| Zhipu | `ZHIPU_API_KEY` |
| DeepSeek | `DEEPSEEK_API_KEY` |
| Moonshot | `MOONSHOT_API_KEY` |
| Xiaomi | `XIAOMI_API_KEY` |
| Google | `GOOGLE_API_KEY` |
| Tencent | `TENCENT_SECRET_ID`, `TENCENT_SECRET_KEY` |
| Volcengine | `VOLCENGINE_API_KEY` |

Released under the MIT License.