Unified Streaming
One streaming model across providers with a consistent response type.
Provider abstraction for OpenAI, Anthropic, Gemini, and more, with support for streaming, tool calling, and multimodal content.
cargo add llm-connector

use llm_connector::{LlmClient, types::{ChatRequest, Message, Role}};
// Requires an async runtime to drive the `await` calls; Tokio is used here.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = LlmClient::openai("sk-...")?;

    let request = ChatRequest {
        model: "gpt-4".to_string(),
        messages: vec![Message::text(Role::User, "Hello!")],
        ..Default::default()
    };

    let response = client.chat(&request).await?;
    println!("{}", response.content);
    Ok(())
}