diff --git a/crates/factor-llm/src/spin.rs b/crates/factor-llm/src/spin.rs index 75507b9d8d..e2fc22bfb8 100644 --- a/crates/factor-llm/src/spin.rs +++ b/crates/factor-llm/src/spin.rs @@ -2,7 +2,7 @@ use std::path::PathBuf; use std::sync::Arc; use spin_factors::runtime_config::toml::GetTomlValue; -use spin_llm_remote_http::RemoteHttpLlmEngine; +use spin_llm_remote_http::{ApiType, RemoteHttpLlmEngine}; use spin_world::async_trait; use spin_world::v1::llm::{self as v1}; use spin_world::v2::llm::{self as v2}; @@ -122,6 +122,7 @@ impl LlmCompute { LlmCompute::RemoteHttp(config) => Arc::new(Mutex::new(RemoteHttpLlmEngine::new( config.url, config.auth_token, + config.api_type, ))), }; Ok(engine) @@ -132,6 +133,8 @@ impl LlmCompute { pub struct RemoteHttpCompute { url: Url, auth_token: String, + #[serde(default)] + api_type: ApiType, } /// A noop engine used when the local engine feature is disabled. diff --git a/crates/llm-remote-http/src/default.rs b/crates/llm-remote-http/src/default.rs new file mode 100644 index 0000000000..e8091b000e --- /dev/null +++ b/crates/llm-remote-http/src/default.rs @@ -0,0 +1,136 @@ +use anyhow::Result; +use reqwest::{ + header::{HeaderMap, HeaderValue}, + Client, Url, +}; +use serde_json::json; +use spin_world::{ + async_trait, + v2::llm::{self as wasi_llm}, +}; + +use crate::{EmbeddingResponseBody, InferRequestBodyParams, InferResponseBody, LlmWorker}; + +pub(crate) struct DefaultAgentEngine { + auth_token: String, + url: Url, + client: Option, +} + +impl DefaultAgentEngine { + pub fn new(auth_token: String, url: Url, client: Option) -> Self { + Self { + auth_token, + url, + client, + } + } +} + +#[async_trait] +impl LlmWorker for DefaultAgentEngine { + async fn infer( + &mut self, + model: wasi_llm::InferencingModel, + prompt: String, + params: wasi_llm::InferencingParams, + ) -> Result { + let client = self.client.get_or_insert_with(Default::default); + + let mut headers = HeaderMap::new(); + headers.insert( + "authorization", + 
HeaderValue::from_str(&format!("bearer {}", self.auth_token)).map_err(|_| { + wasi_llm::Error::RuntimeError("Failed to create authorization header".to_string()) + })?, + ); + spin_telemetry::inject_trace_context(&mut headers); + + let inference_options = InferRequestBodyParams { + max_tokens: params.max_tokens, + repeat_penalty: params.repeat_penalty, + repeat_penalty_last_n_token_count: params.repeat_penalty_last_n_token_count, + temperature: params.temperature, + top_k: params.top_k, + top_p: params.top_p, + }; + let body = serde_json::to_string(&json!({ + "model": model, + "prompt": prompt, + "options": inference_options + })) + .map_err(|_| wasi_llm::Error::RuntimeError("Failed to serialize JSON".to_string()))?; + + let infer_url = self + .url + .join("/infer") + .map_err(|_| wasi_llm::Error::RuntimeError("Failed to create URL".to_string()))?; + tracing::info!("Sending remote inference request to {infer_url}"); + + let resp = client + .request(reqwest::Method::POST, infer_url) + .headers(headers) + .body(body) + .send() + .await + .map_err(|err| { + wasi_llm::Error::RuntimeError(format!("POST /infer request error: {err}")) + })?; + + match resp.json::().await { + Ok(val) => Ok(val.into()), + Err(err) => Err(wasi_llm::Error::RuntimeError(format!( + "Failed to deserialize response for \"POST /index\": {err}" + ))), + } + } + + async fn generate_embeddings( + &mut self, + model: wasi_llm::EmbeddingModel, + data: Vec, + ) -> Result { + let client = self.client.get_or_insert_with(Default::default); + + let mut headers = HeaderMap::new(); + headers.insert( + "authorization", + HeaderValue::from_str(&format!("bearer {}", self.auth_token)).map_err(|_| { + wasi_llm::Error::RuntimeError("Failed to create authorization header".to_string()) + })?, + ); + spin_telemetry::inject_trace_context(&mut headers); + + let body = serde_json::to_string(&json!({ + "model": model, + "input": data + })) + .map_err(|_| wasi_llm::Error::RuntimeError("Failed to serialize 
JSON".to_string()))?; + + let resp = client + .request( + reqwest::Method::POST, + self.url.join("/embed").map_err(|_| { + wasi_llm::Error::RuntimeError("Failed to create URL".to_string()) + })?, + ) + .headers(headers) + .body(body) + .send() + .await + .map_err(|err| { + wasi_llm::Error::RuntimeError(format!("POST /embed request error: {err}")) + })?; + + match resp.json::().await { + Ok(val) => Ok(val.into()), + Err(err) => Err(wasi_llm::Error::RuntimeError(format!( + "Failed to deserialize response for \"POST /embed\": {err}" + ))), + } + } + + fn url(&self) -> Url { + self.url.clone() + } +} diff --git a/crates/llm-remote-http/src/lib.rs b/crates/llm-remote-http/src/lib.rs index cb2a3913c4..fb84eee528 100644 --- a/crates/llm-remote-http/src/lib.rs +++ b/crates/llm-remote-http/src/lib.rs @@ -1,17 +1,47 @@ use anyhow::Result; -use reqwest::{ - header::{HeaderMap, HeaderValue}, - Client, Url, -}; +use reqwest::Url; use serde::{Deserialize, Serialize}; -use serde_json::json; -use spin_world::v2::llm::{self as wasi_llm}; +use spin_world::{ + async_trait, + v2::llm::{self as wasi_llm}, +}; + +use crate::schema::{ChatCompletionChoice, Embedding}; + +mod default; +mod open_ai; +mod schema; -#[derive(Clone)] pub struct RemoteHttpLlmEngine { - auth_token: String, - url: Url, - client: Option, + worker: Box, +} + +impl RemoteHttpLlmEngine { + pub fn new(url: Url, auth_token: String, api_type: ApiType) -> Self { + let worker: Box = match api_type { + ApiType::OpenAi => Box::new(open_ai::OpenAIAgentEngine::new(auth_token, url, None)), + ApiType::Default => Box::new(default::DefaultAgentEngine::new(auth_token, url, None)), + }; + Self { worker } + } +} + +#[async_trait] +pub trait LlmWorker: Send + Sync { + async fn infer( + &mut self, + model: wasi_llm::InferencingModel, + prompt: String, + params: wasi_llm::InferencingParams, + ) -> Result; + + async fn generate_embeddings( + &mut self, + model: wasi_llm::EmbeddingModel, + data: Vec, + ) -> Result; + + fn url(&self) -> 
Url; } #[derive(Serialize)] @@ -38,6 +68,43 @@ struct InferResponseBody { usage: InferUsage, } +#[derive(Deserialize)] +struct CreateChatCompletionResponse { + /// A unique identifier for the chat completion. + #[serde(rename = "id")] + _id: String, + /// The object type, which is always `chat.completion`. + #[serde(rename = "object")] + _object: String, + /// The Unix timestamp (in seconds) of when the chat completion was created. + #[serde(rename = "created")] + _created: u64, + /// The model used for the chat completion. + #[serde(rename = "model")] + _model: String, + /// This fingerprint represents the backend configuration that the model runs with. + /// + /// While it's deprecated, it's still provided for compatibility with older clients. + #[serde(rename = "system_fingerprint")] + _system_fingerprint: Option, + /// A list of chat completion choices. Can be more than one if `n` is greater than 1. + choices: Vec, + /// Usage statistics for the completion request + #[serde(rename = "usage")] + usage: CompletionUsage, +} + +#[derive(Deserialize)] +struct CompletionUsage { + /// Number of tokens in the generated completion. + completion_tokens: u32, + /// Number of tokens in the prompt. + prompt_tokens: u32, + /// Total number of tokens used in the request (prompt + completion). 
+ #[serde(rename = "total_tokens")] + _total_tokens: u32, +} + #[derive(Deserialize)] #[serde(rename_all(deserialize = "camelCase"))] struct EmbeddingUsage { @@ -50,6 +117,32 @@ struct EmbeddingResponseBody { usage: EmbeddingUsage, } +#[derive(Deserialize)] +struct CreateEmbeddingResponse { + #[serde(rename = "object")] + _object: String, + #[serde(rename = "model")] + _model: String, + data: Vec, + usage: OpenAIEmbeddingUsage, +} + +impl CreateEmbeddingResponse { + fn embeddings(&self) -> Vec> { + self.data + .iter() + .map(|embedding| embedding.embedding.clone()) + .collect() + } +} + +#[derive(Deserialize)] +struct OpenAIEmbeddingUsage { + prompt_tokens: u32, + #[serde(rename = "total_tokens")] + _total_tokens: u32, +} + impl RemoteHttpLlmEngine { pub async fn infer( &mut self, @@ -57,60 +150,7 @@ impl RemoteHttpLlmEngine { prompt: String, params: wasi_llm::InferencingParams, ) -> Result { - let client = self.client.get_or_insert_with(Default::default); - - let mut headers = HeaderMap::new(); - headers.insert( - "authorization", - HeaderValue::from_str(&format!("bearer {}", self.auth_token)).map_err(|_| { - wasi_llm::Error::RuntimeError("Failed to create authorization header".to_string()) - })?, - ); - spin_telemetry::inject_trace_context(&mut headers); - - let inference_options = InferRequestBodyParams { - max_tokens: params.max_tokens, - repeat_penalty: params.repeat_penalty, - repeat_penalty_last_n_token_count: params.repeat_penalty_last_n_token_count, - temperature: params.temperature, - top_k: params.top_k, - top_p: params.top_p, - }; - let body = serde_json::to_string(&json!({ - "model": model, - "prompt": prompt, - "options": inference_options - })) - .map_err(|_| wasi_llm::Error::RuntimeError("Failed to serialize JSON".to_string()))?; - - let infer_url = self - .url - .join("/infer") - .map_err(|_| wasi_llm::Error::RuntimeError("Failed to create URL".to_string()))?; - tracing::info!("Sending remote inference request to {infer_url}"); - - let resp = 
client - .request(reqwest::Method::POST, infer_url) - .headers(headers) - .body(body) - .send() - .await - .map_err(|err| { - wasi_llm::Error::RuntimeError(format!("POST /infer request error: {err}")) - })?; - - match resp.json::().await { - Ok(val) => Ok(wasi_llm::InferencingResult { - text: val.text, - usage: wasi_llm::InferencingUsage { - prompt_token_count: val.usage.prompt_token_count, - generated_token_count: val.usage.generated_token_count, - }, - }), - Err(err) => Err(wasi_llm::Error::RuntimeError(format!( - "Failed to deserialize response for \"POST /index\": {err}" - ))), - } + self.worker.infer(model, prompt, params).await } pub async fn generate_embeddings( @@ -118,62 +158,65 @@ impl RemoteHttpLlmEngine { model: wasi_llm::EmbeddingModel, data: Vec, ) -> Result { - let client = self.client.get_or_insert_with(Default::default); - - let mut headers = HeaderMap::new(); - headers.insert( - "authorization", - HeaderValue::from_str(&format!("bearer {}", self.auth_token)).map_err(|_| { - wasi_llm::Error::RuntimeError("Failed to create authorization header".to_string()) - })?, - ); - spin_telemetry::inject_trace_context(&mut headers); - - let body = serde_json::to_string(&json!({ - "model": model, - "input": data - })) - .map_err(|_| wasi_llm::Error::RuntimeError("Failed to serialize JSON".to_string()))?; - - let resp = client - .request( - reqwest::Method::POST, - self.url.join("/embed").map_err(|_| { - wasi_llm::Error::RuntimeError("Failed to create URL".to_string()) - })?, - ) - .headers(headers) - .body(body) - .send() - .await - .map_err(|err| { - wasi_llm::Error::RuntimeError(format!("POST /embed request error: {err}")) - })?; - - match resp.json::().await { - Ok(val) => Ok(wasi_llm::EmbeddingsResult { - embeddings: val.embeddings, - usage: wasi_llm::EmbeddingsUsage { - prompt_token_count: val.usage.prompt_token_count, - }, - }), - Err(err) => Err(wasi_llm::Error::RuntimeError(format!( - "Failed to deserialize response for \"POST /embed\": {err}" - ))), - 
} + self.worker.generate_embeddings(model, data).await } pub fn url(&self) -> Url { - self.url.clone() + self.worker.url() } } -impl RemoteHttpLlmEngine { - pub fn new(url: Url, auth_token: String) -> Self { - RemoteHttpLlmEngine { - url, - auth_token, - client: None, +impl From for wasi_llm::InferencingResult { + fn from(value: InferResponseBody) -> Self { + Self { + text: value.text, + usage: wasi_llm::InferencingUsage { + prompt_token_count: value.usage.prompt_token_count, + generated_token_count: value.usage.generated_token_count, + }, + } + } +} + +impl From for wasi_llm::InferencingResult { + fn from(value: CreateChatCompletionResponse) -> Self { + Self { + text: value.choices[0].message.content.clone(), + usage: wasi_llm::InferencingUsage { + prompt_token_count: value.usage.prompt_tokens, + generated_token_count: value.usage.completion_tokens, + }, + } + } +} + +impl From for wasi_llm::EmbeddingsResult { + fn from(value: EmbeddingResponseBody) -> Self { + Self { + embeddings: value.embeddings, + usage: wasi_llm::EmbeddingsUsage { + prompt_token_count: value.usage.prompt_token_count, + }, } } } + +impl From for wasi_llm::EmbeddingsResult { + fn from(value: CreateEmbeddingResponse) -> Self { + Self { + embeddings: value.embeddings(), + usage: wasi_llm::EmbeddingsUsage { + prompt_token_count: value.usage.prompt_tokens, + }, + } + } +} + +#[derive(Debug, Default, serde::Deserialize, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum ApiType { + /// Compatible with OpenAI's API alongside some other LLMs + OpenAi, + #[default] + Default, +} diff --git a/crates/llm-remote-http/src/open_ai.rs b/crates/llm-remote-http/src/open_ai.rs new file mode 100644 index 0000000000..646f81e472 --- /dev/null +++ b/crates/llm-remote-http/src/open_ai.rs @@ -0,0 +1,182 @@ +use reqwest::{ + header::{HeaderMap, HeaderValue}, + Client, Url, +}; +use serde::Serialize; +use spin_world::{ + async_trait, + v2::llm::{self as wasi_llm}, +}; + +use crate::{ + schema::{EncodingFormat, 
Prompt, ResponseError, Role}, + CreateChatCompletionResponse, CreateEmbeddingResponse, LlmWorker, +}; + +pub(crate) struct OpenAIAgentEngine { + auth_token: String, + url: Url, + client: Option, +} + +impl OpenAIAgentEngine { + pub fn new(auth_token: String, url: Url, client: Option) -> Self { + Self { + auth_token, + url, + client, + } + } +} + +#[async_trait] +impl LlmWorker for OpenAIAgentEngine { + async fn infer( + &mut self, + model: wasi_llm::InferencingModel, + prompt: String, + params: wasi_llm::InferencingParams, + ) -> Result { + let client = self.client.get_or_insert_with(Default::default); + + let mut headers = HeaderMap::new(); + headers.insert( + "authorization", + HeaderValue::from_str(&format!("bearer {}", self.auth_token)).map_err(|_| { + wasi_llm::Error::RuntimeError("Failed to create authorization header".to_string()) + })?, + ); + spin_telemetry::inject_trace_context(&mut headers); + + let chat_url = self + .url + .join("/v1/chat/completions") + .map_err(|_| wasi_llm::Error::RuntimeError("Failed to create URL".to_string()))?; + + tracing::info!("Sending remote inference request to {chat_url}"); + + let body = CreateChatCompletionRequest { + // TODO: Make Role customizable + messages: vec![Prompt::new(Role::User, prompt)], + model, + max_completion_tokens: Some(params.max_tokens), + frequency_penalty: Some(params.repeat_penalty), + reasoning_effort: None, + verbosity: None, + }; + + let resp = client + .request(reqwest::Method::POST, chat_url) + .headers(headers) + .json(&body) + .send() + .await + .map_err(|err| { + wasi_llm::Error::RuntimeError(format!( + "POST /v1/chat/completions request error: {err}" + )) + })?; + + match resp.json::().await { + Ok(CreateChatCompletionResponses::Success(val)) => Ok(val.into()), + Ok(CreateChatCompletionResponses::Error { error }) => Err(error.into()), + Err(err) => Err(wasi_llm::Error::RuntimeError(format!( + "Failed to deserialize response for \"POST /v1/chat/completions\": {err}" + ))), + } + } + + async 
fn generate_embeddings( + &mut self, + model: wasi_llm::EmbeddingModel, + data: Vec, + ) -> Result { + let client = self.client.get_or_insert_with(Default::default); + + let mut headers = HeaderMap::new(); + headers.insert( + "authorization", + HeaderValue::from_str(&format!("bearer {}", self.auth_token)).map_err(|_| { + wasi_llm::Error::RuntimeError("Failed to create authorization header".to_string()) + })?, + ); + spin_telemetry::inject_trace_context(&mut headers); + + let body = CreateEmbeddingRequest { + input: data, + model, + encoding_format: None, + dimensions: None, + user: None, + }; + + let chat_url = self + .url + .join("/v1/embeddings") + .map_err(|_| wasi_llm::Error::RuntimeError("Failed to create URL".to_string()))?; + + tracing::info!("Sending remote embedding request to {chat_url}"); + + let resp = client + .request(reqwest::Method::POST, chat_url) + .headers(headers) + .json(&body) + .send() + .await + .map_err(|err| { + wasi_llm::Error::RuntimeError(format!("POST /v1/embeddings request error: {err}")) + })?; + + match resp.json::().await { + Ok(CreateEmbeddingResponses::Success(val)) => Ok(val.into()), + Ok(CreateEmbeddingResponses::Error { error }) => Err(error.into()), + Err(err) => Err(wasi_llm::Error::RuntimeError(format!( + "Failed to deserialize response for \"POST /v1/embeddings\": {err}" + ))), + } + } + + fn url(&self) -> Url { + self.url.clone() + } +} + +#[derive(Serialize, Debug)] +struct CreateChatCompletionRequest { + messages: Vec, + model: String, + #[serde(skip_serializing_if = "Option::is_none")] + max_completion_tokens: Option, + #[serde(skip_serializing_if = "Option::is_none")] + frequency_penalty: Option, + #[serde(skip_serializing_if = "Option::is_none")] + reasoning_effort: Option, + #[serde(skip_serializing_if = "Option::is_none")] + verbosity: Option, +} + +#[derive(Serialize, Debug)] +pub struct CreateEmbeddingRequest { + input: Vec, + model: String, + #[serde(skip_serializing_if = "Option::is_none")] + encoding_format: 
Option, + #[serde(skip_serializing_if = "Option::is_none")] + dimensions: Option, + #[serde(skip_serializing_if = "Option::is_none")] + user: Option, +} + +#[derive(serde::Deserialize)] +#[serde(untagged)] +enum CreateChatCompletionResponses { + Success(CreateChatCompletionResponse), + Error { error: ResponseError }, +} + +#[derive(serde::Deserialize)] +#[serde(untagged)] +enum CreateEmbeddingResponses { + Success(CreateEmbeddingResponse), + Error { error: ResponseError }, +} diff --git a/crates/llm-remote-http/src/schema.rs b/crates/llm-remote-http/src/schema.rs new file mode 100644 index 0000000000..e7a6937fd3 --- /dev/null +++ b/crates/llm-remote-http/src/schema.rs @@ -0,0 +1,185 @@ +use serde::{Deserialize, Serialize}; +use spin_world::v2::llm as wasi_llm; + +#[derive(Serialize, Debug)] +pub struct Prompt { + role: Role, + content: String, +} + +impl Prompt { + pub fn new(role: Role, content: String) -> Self { + Self { role, content } + } +} + +#[derive(Serialize, Debug)] +pub enum Role { + #[serde(rename = "system")] + System, + #[serde(rename = "user")] + User, + #[serde(rename = "assistant")] + Assistant, + #[serde(rename = "tool")] + Tool, +} + +impl TryFrom<&str> for Role { + type Error = wasi_llm::Error; + + fn try_from(value: &str) -> Result { + match value { + "system" => Ok(Role::System), + "user" => Ok(Role::User), + "assistant" => Ok(Role::Assistant), + "tool" => Ok(Role::Tool), + _ => Err(wasi_llm::Error::InvalidInput(format!( + "{value} not a valid role" + ))), + } + } +} + +#[derive(Serialize, Debug)] +pub enum EncodingFormat { + #[serde(rename = "float")] + Float, + #[serde(rename = "base64")] + Base64, +} + +impl TryFrom<&str> for EncodingFormat { + type Error = wasi_llm::Error; + + fn try_from(value: &str) -> Result { + match value { + "float" => Ok(EncodingFormat::Float), + "base64" => Ok(EncodingFormat::Base64), + _ => Err(wasi_llm::Error::InvalidInput(format!( + "{value} not a valid encoding format" + ))), + } + } +} + +#[derive(Serialize, 
Debug)] +enum ReasoningEffort { + #[serde(rename = "minimal")] + Minimal, + #[serde(rename = "low")] + Low, + #[serde(rename = "medium")] + Medium, + #[serde(rename = "high")] + High, +} + +impl TryFrom<&str> for ReasoningEffort { + type Error = wasi_llm::Error; + + fn try_from(value: &str) -> Result { + match value { + "minimal" => Ok(ReasoningEffort::Minimal), + "low" => Ok(ReasoningEffort::Low), + "medium" => Ok(ReasoningEffort::Medium), + "high" => Ok(ReasoningEffort::High), + _ => Err(wasi_llm::Error::InvalidInput(format!( + "{value} not a recognized reasoning effort", + ))), + } + } +} + +#[derive(Serialize, Debug)] +enum Verbosity { + Low, + Medium, + High, +} + +impl TryFrom<&str> for Verbosity { + type Error = wasi_llm::Error; + + fn try_from(value: &str) -> Result { + match value { + "low" => Ok(Verbosity::Low), + "medium" => Ok(Verbosity::Medium), + "high" => Ok(Verbosity::High), + _ => Err(wasi_llm::Error::InvalidInput(format!( + "{value} not a recognized verbosity", + ))), + } + } +} + +#[derive(Deserialize)] +pub struct ChatCompletionChoice { + #[serde(rename = "index")] + /// The index of the choice in the list of choices + _index: u32, + pub message: ChatCompletionResponseMessage, + /// The reason the model stopped generating tokens. This will be `stop` if the model hit a + /// natural stop point or a provided stop sequence, + #[serde(rename = "finish_reason")] + _finish_reason: String, + /// Log probability information for the choice. + #[serde(rename = "logprobs")] + _logprobs: Option, +} + +#[derive(Deserialize)] +/// A chat completion message generated by the model. 
+pub struct ChatCompletionResponseMessage { + /// The role of the author of this message + #[serde(rename = "role")] + _role: String, + /// The contents of the message + pub content: String, + /// The refusal message generated by the model + #[serde(rename = "refusal")] + _refusal: Option, + /// The reasoning message generated by the model + #[serde(rename = "reasoning")] + _reasoning: Option, +} + +#[derive(Deserialize)] +pub struct Logprobs { + /// A list of message content tokens with log probability information. + #[serde(rename = "content")] + _content: Option>, + /// A list of message refusal tokens with log probability information. + #[serde(rename = "refusal")] + _refusal: Option>, +} + +#[derive(Deserialize)] +pub struct Embedding { + /// The index of the embedding in the list of embeddings.. + #[serde(rename = "index")] + _index: u32, + /// The embedding vector, which is a list of floats. The length of vector depends on the model as + /// listed in the [embedding guide](https://platform.openai.com/docs/guides/embeddings). + pub embedding: Vec, + /// The object type, which is always "embedding" + #[serde(rename = "object")] + _object: String, +} + +#[derive(Deserialize, Default)] +pub struct ResponseError { + pub message: String, + #[serde(rename = "type")] + _t: String, + #[serde(rename = "param")] + _param: Option, + #[serde(rename = "code")] + _code: String, +} + +impl From for wasi_llm::Error { + fn from(value: ResponseError) -> Self { + wasi_llm::Error::RuntimeError(value.message) + } +} diff --git a/examples/open-ai-rust/.gitignore b/examples/open-ai-rust/.gitignore new file mode 100644 index 0000000000..386474fa59 --- /dev/null +++ b/examples/open-ai-rust/.gitignore @@ -0,0 +1,2 @@ +target/ +.spin/ diff --git a/examples/open-ai-rust/Cargo.lock b/examples/open-ai-rust/Cargo.lock new file mode 100644 index 0000000000..340ce2bb1a --- /dev/null +++ b/examples/open-ai-rust/Cargo.lock @@ -0,0 +1,853 @@ +# This file is automatically @generated by Cargo. 
+# It is not intended for manual editing. +version = 3 + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "bitflags" +version = "2.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34efbcccd345379ca2868b2b2c9d3782e9cc58ba87bc7d79d5b53d9c9ae6f25d" + +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + +[[package]] +name = "cc" +version = "1.2.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc" +dependencies = [ + "shlex", +] + +[[package]] 
+name = "cfg-if" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" + +[[package]] +name = "chrono" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = 
"futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" + +[[package]] +name = "heck" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "http" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "id-arena" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" + +[[package]] +name = "indexmap" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2481980430f9f78649238835720ddccc57e52df14ffce1c6f37391d61b563e9" +dependencies = [ + "equivalent", + "hashbrown", + "serde", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "leb128" +version = "0.2.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + +[[package]] +name = "libc" +version = "0.2.175" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" + +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "open-ai-rust" +version = "0.1.0" +dependencies = [ + "anyhow", + "spin-sdk", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "proc-macro2" +version = "1.0.101" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "routefinder" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0971d3c8943a6267d6bd0d782fdc4afa7593e7381a92a3df950ff58897e066b5" +dependencies = [ + "smartcow", + "smartstring", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "semver" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "serde_json" +version = "1.0.143" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = 
"shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "slab" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "smartcow" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "656fcb1c1fca8c4655372134ce87d8afdf5ec5949ebabe8d314be0141d8b5da2" +dependencies = [ + "smartstring", +] + +[[package]] +name = "smartstring" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29" +dependencies = [ + "autocfg", + "static_assertions", + "version_check", +] + +[[package]] +name = "spdx" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" +dependencies = [ + "smallvec", +] + +[[package]] +name = "spin-executor" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d11baf86ca52100e8742ea43d2c342cf4d75b94f8a85454cf44fd108cdd71d5" +dependencies = [ + "futures", + "once_cell", + "wit-bindgen", +] + +[[package]] +name = "spin-macro" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "988ffe27470862bf28fe9b4f0268361040d4732cd86bcaebe45aa3d3b3e3d896" +dependencies = [ + "anyhow", + "bytes", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "spin-sdk" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f845e889d8431740806e04704ac5aa619466dfaef626f3c15952ecf823913e01" +dependencies = [ + "anyhow", + "async-trait", + "bytes", + "chrono", + "form_urlencoded", + "futures", + "http", + "once_cell", + "routefinder", + "serde", + "serde_json", + "spin-executor", + "spin-macro", + "thiserror", + "wit-bindgen", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = 
"unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.106", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.38.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0ad2b51884de9c7f4fe2fd1043fccb8dcad4b1e29558146ee57a144d15779f3f" +dependencies = [ + "leb128", +] + +[[package]] +name = "wasm-encoder" +version = "0.41.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "972f97a5d8318f908dded23594188a90bcd09365986b1163e66d70170e5287ae" +dependencies = [ + "leb128", +] + +[[package]] +name = "wasm-metadata" +version = "0.10.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18ebaa7bd0f9e7a5e5dd29b9a998acf21c4abed74265524dd7e85934597bfb10" +dependencies = [ + "anyhow", + "indexmap", + "serde", + "serde_derive", + "serde_json", + "spdx", + "wasm-encoder 0.41.2", + "wasmparser 0.121.2", +] + +[[package]] +name = "wasmparser" +version = "0.118.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77f1154f1ab868e2a01d9834a805faca7bf8b50d041b4ca714d005d0dab1c50c" +dependencies = [ + "indexmap", + "semver", +] + +[[package]] +name = "wasmparser" +version = "0.121.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dbe55c8f9d0dbd25d9447a5a889ff90c0cc3feaa7395310d3d826b2c703eaab" +dependencies = [ + "bitflags", + "indexmap", + "semver", +] + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + +[[package]] +name = "wit-bindgen" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b76f1d099678b4f69402a421e888bbe71bf20320c2f3f3565d0e7484dbe5bc20" +dependencies = [ + "bitflags", + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75d55e1a488af2981fb0edac80d8d20a51ac36897a1bdef4abde33c29c1b6d0d" +dependencies = [ + "anyhow", + "wit-component", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01ff9cae7bf5736750d94d91eb8a49f5e3a04aff1d1a3218287d9b2964510f8" +dependencies = [ + "anyhow", + "heck", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "804a98e2538393d47aa7da65a7348116d6ff403b426665152b70a168c0146d49" +dependencies = [ + "anyhow", + "proc-macro2", + "quote", + "syn 2.0.106", + "wit-bindgen-core", + 
"wit-bindgen-rust", + "wit-component", +] + +[[package]] +name = "wit-component" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a35a2a9992898c9d27f1664001860595a4bc99d32dd3599d547412e17d7e2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder 0.38.1", + "wasm-metadata", + "wasmparser 0.118.2", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "316b36a9f0005f5aa4b03c39bc3728d045df136f8c13a73b7db4510dec725e08" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", +] diff --git a/examples/open-ai-rust/Cargo.toml b/examples/open-ai-rust/Cargo.toml new file mode 100644 index 0000000000..ab70fa8740 --- /dev/null +++ b/examples/open-ai-rust/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "open-ai-rust" +authors = ["Fermyon Engineering "] +description = "Example showing using openAI with Spin" +version = "0.1.0" +rust-version = "1.78" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +anyhow = "1" +spin-sdk = "3.1.0" + +[workspace] diff --git a/examples/open-ai-rust/runtime-config.toml b/examples/open-ai-rust/runtime-config.toml new file mode 100644 index 0000000000..ee86d9f061 --- /dev/null +++ b/examples/open-ai-rust/runtime-config.toml @@ -0,0 +1,5 @@ +[llm_compute] +type = "remote_http" +url = "http://localhost:11434" +auth_token = "your-secret-token" +api_type = "open_ai" diff --git a/examples/open-ai-rust/spin.toml b/examples/open-ai-rust/spin.toml new file mode 100644 index 0000000000..3ea99345b7 --- /dev/null +++ b/examples/open-ai-rust/spin.toml @@ -0,0 +1,22 @@ +#:schema https://schemas.spinframework.dev/spin/manifest-v2/latest.json + +spin_manifest_version = 2 + +[application] +name = "open-ai-rust" +version = "0.1.0" +authors = 
["Fermyon Engineering <engineering@fermyon.com>"]
+description = "Example showing how to use OpenAI-compatible APIs with Spin"
+
+[[trigger.http]]
+route = "/..."
+component = "open-ai-rust"
+
+[component.open-ai-rust]
+source = "target/wasm32-wasip1/release/open_ai_rust.wasm"
+allowed_outbound_hosts = []
+ai_models = ["gpt-oss:20b"]
+
+[component.open-ai-rust.build]
+command = "cargo build --target wasm32-wasip1 --release"
+watch = ["src/**/*.rs", "Cargo.toml"]
diff --git a/examples/open-ai-rust/src/lib.rs b/examples/open-ai-rust/src/lib.rs
new file mode 100644
index 0000000000..06609910bc
--- /dev/null
+++ b/examples/open-ai-rust/src/lib.rs
@@ -0,0 +1,25 @@
+use spin_sdk::http::{IntoResponse, Request, Response};
+use spin_sdk::http_component;
+
+/// A simple Spin HTTP component that sends a fixed prompt to the LLM
+/// engine configured in runtime-config.toml and returns the generated
+/// text plus total token usage (prompt + generated).
+#[http_component]
+fn handle_open_ai_rust(req: Request) -> anyhow::Result<impl IntoResponse> {
+    let llm_chat = spin_sdk::llm::infer(
+        spin_sdk::llm::InferencingModel::Other("gpt-oss:20b"),
+        "tell me about Epe in Lagos, Nigeria",
+    )?;
+
+    println!("Handling request to {:?}", req.header("spin-full-url"));
+
+    Ok(Response::builder()
+        .status(200)
+        .header("content-type", "text/plain")
+        .body(format!(
+            "Here's your response: {}\n Total tokens used: {}",
+            llm_chat.text,
+            // "Total" means prompt tokens plus generated tokens, not just the prompt.
+            llm_chat.usage.prompt_token_count + llm_chat.usage.generated_token_count
+        ))
+        .build())
+}