The AI Toolkit for Rust
An open-source Rust library for building AI-powered applications, inspired by the Vercel AI SDK.
cargo add aisdk
Stream
use aisdk::{ core::LanguageModelRequest, providers::OpenAI,};let stream = LanguageModelRequest::builder() .model(OpenAI::gpt_5()) .prompt("What is the meaning of life?") .build() .stream_text() .await? .stream;use aisdk::{ core::LanguageModelRequest, providers::OpenAI,};let response = LanguageModelRequest::builder() .model(OpenAI::gpt_5()) .prompt("What is the meaning of life?") .build() .generate_text() .await? .text();#[tool]/// Get the weather in a given locationfn get_weather(location: String) -> Tool { Ok(format!("72°F in {}", location))}let stream = LanguageModelRequest::builder() .model(OpenAI::gpt_5()) .system("You are a helpful assistant.") .prompt("Weather in SF?") .with_tool(get_weather()) .build() .stream_text() .await? .stream;#[tool]/// Get the weather in a given locationfn get_weather(location: String) -> Tool { Ok(format!("72°F in {}", location))}let response = LanguageModelRequest::builder() .model(OpenAI::gpt_5()) .system("You are a helpful assistant.") .prompt("Weather in SF?") .with_tool(get_weather()) .build() .generate_text() .await? .text();#[derive(JsonSchema, Deserialize)]struct User { name: String, age: u32, email: String}let stream = LanguageModelRequest::builder() .model(OpenAI::gpt_5()) .prompt("Generate a random user") .schema::<User>() .build() .stream_text() .await? .stream;#[derive(JsonSchema, Deserialize)]struct User { name: String, age: u32, email: String}let user: User = LanguageModelRequest::builder() .model(OpenAI::gpt_5()) .prompt("Generate a random user") .schema::<User>() .build() .generate_text() .await? 
.into_schema()?;#[tool]/// Get the weather in a given locationfn get_weather(location: String) -> Tool { Ok(format!("72°F in {}", location))}// Using model that doesn't support tool callslet stream = LanguageModelRequest::builder() .model(OpenAI::gpt_3_5_turbo()) .system("You are a helpful assistant.") .prompt("Weather in SF?") .with_tool(get_weather()) // ^ 🦀 COMPILE ERROR 🦀 // GPT 3.5 Turbo doesn't support tools .build() .stream_text() .await?;#[tool]/// Get the weather in a given locationfn get_weather(location: String) -> Tool { Ok(format!("72°F in {}", location))}// Using model that doesn't support tool callslet response = LanguageModelRequest::builder() .model(OpenAI::gpt_3_5_turbo()) .system("You are a helpful assistant.") .prompt("Weather in SF?") .with_tool(get_weather()) // ^ 🦀 COMPILE ERROR 🦀 // GPT 3.5 Turbo doesn't support tools .build() .generate_text() .await?;use aisdk::{ core::LanguageModelRequest, providers::Anthropic,};let stream = LanguageModelRequest::builder() .model(Anthropic::claude_opus_4_5()) .prompt("What is the meaning of life?") .build() .stream_text() .await? .stream;use aisdk::{ core::LanguageModelRequest, providers::Anthropic,};let response = LanguageModelRequest::builder() .model(Anthropic::claude_opus_4_5()) .prompt("What is the meaning of life?") .build() .generate_text() .await? .text();#[tool]/// Get the weather in a given locationfn get_weather(location: String) -> Tool { Ok(format!("72°F in {}", location))}let stream = LanguageModelRequest::builder() .model(Anthropic::claude_opus_4_5()) .system("You are a helpful assistant.") .prompt("Weather in SF?") .with_tool(get_weather()) .build() .stream_text() .await? 
.stream;#[tool]/// Get the weather in a given locationfn get_weather(location: String) -> Tool { Ok(format!("72°F in {}", location))}let response = LanguageModelRequest::builder() .model(Anthropic::claude_opus_4_5()) .system("You are a helpful assistant.") .prompt("Weather in SF?") .with_tool(get_weather()) .build() .generate_text() .await? .text();#[derive(JsonSchema, Deserialize)]struct User { name: String, age: u32, email: String}let stream = LanguageModelRequest::builder() .model(Anthropic::claude_opus_4_5()) .prompt("Generate a random user") .schema::<User>() .build() .stream_text() .await? .stream;#[derive(JsonSchema, Deserialize)]struct User { name: String, age: u32, email: String}let user: User = LanguageModelRequest::builder() .model(Anthropic::claude_opus_4_5()) .prompt("Generate a random user") .schema::<User>() .build() .generate_text() .await? .into_schema()?;#[derive(JsonSchema, Deserialize)]struct User { name: String, email: String}// Using model that doesn't support structured outputlet stream = LanguageModelRequest::builder() .model(Anthropic::claude_opus_4_0()) .prompt("Generate a random user") .schema::<User>() // ^ 🦀 COMPILE ERROR 🦀 // Claude Opus 4.0 doesn't support structured output .build() .stream_text() .await? .stream;#[derive(JsonSchema, Deserialize)]struct User { name: String, email: String}// Using model that doesn't support structured outputlet user: User = LanguageModelRequest::builder() .model(Anthropic::claude_opus_4_0()) .prompt("Generate a random user") .schema::<User>() // ^ 🦀 COMPILE ERROR 🦀 // Claude Opus 4.0 doesn't support structured output .build() .generate_text() .await? .into_schema()?;use aisdk::{ core::LanguageModelRequest, providers::Google,};let stream = LanguageModelRequest::builder() .model(Google::gemini_3_flash_preview()) .prompt("What is the meaning of life?") .build() .stream_text() .await? 
.stream;use aisdk::{ core::LanguageModelRequest, providers::Google,};let response = LanguageModelRequest::builder() .model(Google::gemini_3_flash_preview()) .prompt("What is the meaning of life?") .build() .generate_text() .await? .text();#[tool]/// Get the weather in a given locationfn get_weather(location: String) -> Tool { Ok(format!("72°F in {}", location))}let stream = LanguageModelRequest::builder() .model(Google::gemini_3_flash_preview()) .system("You are a helpful assistant.") .prompt("Weather in SF?") .with_tool(get_weather()) .build() .stream_text() .await? .stream;#[tool]/// Get the weather in a given locationfn get_weather(location: String) -> Tool { Ok(format!("72°F in {}", location))}let response = LanguageModelRequest::builder() .model(Google::gemini_3_flash_preview()) .system("You are a helpful assistant.") .prompt("Weather in SF?") .with_tool(get_weather()) .build() .generate_text() .await? .text();#[derive(JsonSchema, Deserialize)]struct User { name: String, age: u32, email: String}let stream = LanguageModelRequest::builder() .model(Google::gemini_3_flash_preview()) .prompt("Generate a random user") .schema::<User>() .build() .stream_text() .await? .stream;#[derive(JsonSchema, Deserialize)]struct User { name: String, age: u32, email: String}let user: User = LanguageModelRequest::builder() .model(Google::gemini_3_flash_preview()) .prompt("Generate a random user") .schema::<User>() .build() .generate_text() .await? .into_schema()?;use aisdk::{ core::LanguageModelRequest, providers::Google,};// Using model that doesn't support text output for text generationlet stream = LanguageModelRequest::builder() .model(Google::gemini_2_5_pro_preview_tts()) // ^ 🦀 COMPILE ERROR 🦀 // Gemini 2.5 Pro Preview TTS doesn't support text generation .prompt("Explain lifetimes like I'm five") .build() .stream_text() .await? 
.stream;use aisdk::{ core::LanguageModelRequest, providers::Google,};// Using model that doesn't support text output for text generationlet response = LanguageModelRequest::builder() .model(Google::gemini_2_5_pro_preview_tts()) // ^ 🦀 COMPILE ERROR 🦀 // Gemini 2.5 Pro Preview TTS doesn't support text generation .prompt("Explain lifetimes like I'm five") .build() .generate_text() .await? .text();