From b41689ed99c8f0b14ac01baa7ef0fc3131901298 Mon Sep 17 00:00:00 2001
From: "maple@max" 
Date: Wed, 5 Jun 2024 18:04:12 +0800
Subject: [PATCH 1/5] update

---
 async-openai-wasm/src/client.rs               | 71 +++++++++++++++++--
 .../src/types/assistant_stream.rs             |  8 +--
 2 files changed, 67 insertions(+), 12 deletions(-)

diff --git a/async-openai-wasm/src/client.rs b/async-openai-wasm/src/client.rs
index 5605e47..78453b5 100644
--- a/async-openai-wasm/src/client.rs
+++ b/async-openai-wasm/src/client.rs
@@ -446,10 +446,10 @@ impl<C: Config> Client<C> {
         path: &str,
         request: I,
         event_mapper: impl Fn(eventsource_stream::Event) -> Result<O, OpenAIError> + Send + 'static,
-    ) -> Pin<Box<dyn Stream<Item = Result<O, OpenAIError>> + Send>>
+    ) -> OpenAIEventMappedStream<O>
     where
         I: Serialize,
-        O: DeserializeOwned + Send + 'static,
+        O: DeserializeOwned + Send + 'static
     {
         let event_source = self
             .http_client
             .post(self.config.url(path).as_str())
             .query(&self.config.query())
             .headers(self.config.headers())
             .json(&request)
             .eventsource()
             .unwrap();
 
-        // stream_mapped_raw_events(event_source, event_mapper).await
-        todo!()
+        OpenAIEventMappedStream::new(event_source, event_mapper)
     }
 
     /// Make HTTP GET request to receive SSE
@@ -491,13 +490,13 @@
 /// Request which responds with SSE.
 /// [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#event_stream_format)
 #[pin_project]
-pub struct OpenAIEventStream<O> {
+pub struct OpenAIEventStream<O: DeserializeOwned + Send + 'static> {
     #[pin]
     stream: Filter<EventSource, future::Ready<bool>, fn(&Result<Event, reqwest_eventsource::Error>) -> future::Ready<bool>>,
     _phantom_data: PhantomData<O>,
 }
 
-impl<O> OpenAIEventStream<O> {
+impl<O: DeserializeOwned + Send + 'static> OpenAIEventStream<O> {
     pub(crate) fn new(event_source: EventSource) -> Self {
         Self {
             stream: event_source.filter(|result|
                 // filter out the first event which is always Event::Open
                 future::ready(!(result.is_ok() && result.as_ref().unwrap().eq(&Event::Open)))
             ),
             _phantom_data: PhantomData,
         }
     }
 }
@@ -543,6 +542,66 @@ impl<O: DeserializeOwned + Send + 'static> Stream for OpenAIEventStream<O> {
     }
 }
 
+#[pin_project]
+pub struct OpenAIEventMappedStream<O>
+    where O: Send + 'static
+{
+    #[pin]
+    stream: Filter<EventSource, future::Ready<bool>, fn(&Result<Event, reqwest_eventsource::Error>) -> future::Ready<bool>>,
+    event_mapper: Box<dyn Fn(eventsource_stream::Event) -> Result<O, OpenAIError> + Send + 'static>,
+    _phantom_data: PhantomData<O>,
+}
+
+impl<O> OpenAIEventMappedStream<O>
+    where O: Send + 'static
+{
+    pub(crate) fn new<M>(event_source: EventSource, event_mapper: M) -> Self
+        where M: Fn(eventsource_stream::Event) -> Result<O, OpenAIError> + Send + 'static {
+        Self {
+            stream: event_source.filter(|result|
+                // filter out the first event which is always Event::Open
+                future::ready(!(result.is_ok() && result.as_ref().unwrap().eq(&Event::Open)))
+            ),
+            event_mapper: Box::new(event_mapper),
+            _phantom_data: PhantomData,
+        }
+    }
+}
+
+
+impl<O> Stream for OpenAIEventMappedStream<O>
+    where O: Send + 'static
+{
+    type Item = Result<O, OpenAIError>;
+
+    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        let this = self.project();
+        let stream: Pin<&mut _> = this.stream;
+        match stream.poll_next(cx) {
+            Poll::Ready(response) => {
+                match response {
+                    None => Poll::Ready(None), // end of the stream
+                    Some(result) => match result {
+                        Ok(event) => match event {
+                            Event::Open => unreachable!(), // it has been filtered out
+                            Event::Message(message) => {
+                                if message.data == "[DONE]" {
+                                    Poll::Ready(None) // end of the stream, defined by OpenAI
+                                } else {
+                                    todo!()
+                                }
+                            }
+                        }
+                        Err(e) => Poll::Ready(Some(Err(OpenAIError::StreamError(e.to_string()))))
+                    }
+                }
+            }
+            Poll::Pending => Poll::Pending
+        }
+    }
+}
+
+
 // pub(crate) async fn stream_mapped_raw_events(
 //     mut event_source: EventSource,
 //     event_mapper: impl Fn(eventsource_stream::Event) -> Result<O, OpenAIError> + Send + 'static,
diff --git a/async-openai-wasm/src/types/assistant_stream.rs b/async-openai-wasm/src/types/assistant_stream.rs
index 2f9dea7..3407efa 100644
--- a/async-openai-wasm/src/types/assistant_stream.rs
+++ b/async-openai-wasm/src/types/assistant_stream.rs
@@ -1,8 +1,6 @@
-use 
std::pin::Pin;
-
-use futures::Stream;
 use serde::Deserialize;
 
+use crate::client::OpenAIEventMappedStream;
 use crate::error::{ApiError, map_deserialization_error, OpenAIError};
 
 use super::{
@@ -28,7 +26,6 @@
 /// We may add additional events over time, so we recommend handling unknown events gracefully
 /// in your code. See the [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) to learn how to
 /// integrate the Assistants API with streaming.
-
 #[derive(Debug, Deserialize, Clone)]
 #[serde(tag = "event", content = "data")]
 #[non_exhaustive]
@@ -110,8 +107,7 @@ pub enum AssistantStreamEvent {
     Done(String),
 }
 
-pub type AssistantEventStream =
-    Pin<Box<dyn Stream<Item = Result<AssistantStreamEvent, OpenAIError>> + Send>>;
+pub type AssistantEventStream = OpenAIEventMappedStream<AssistantStreamEvent>;
 
 impl TryFrom<eventsource_stream::Event> for AssistantStreamEvent {
     type Error = OpenAIError;

From 2e8a62ba199cb32392e0965c252bd98e7563752b Mon Sep 17 00:00:00 2001
From: "maple@max" 
Date: Wed, 5 Jun 2024 21:38:11 +0800
Subject: [PATCH 2/5] finish Stream impl for OpenAIEventMappedStream

---
 async-openai-wasm/src/client.rs | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/async-openai-wasm/src/client.rs b/async-openai-wasm/src/client.rs
index 78453b5..9a92cd4 100644
--- a/async-openai-wasm/src/client.rs
+++ b/async-openai-wasm/src/client.rs
@@ -549,6 +549,7 @@ pub struct OpenAIEventMappedStream<O>
     #[pin]
     stream: Filter<EventSource, future::Ready<bool>, fn(&Result<Event, reqwest_eventsource::Error>) -> future::Ready<bool>>,
     event_mapper: Box<dyn Fn(eventsource_stream::Event) -> Result<O, OpenAIError> + Send + 'static>,
+    done: bool,
     _phantom_data: PhantomData<O>,
 }
 
@@ -562,6 +563,7 @@ impl<O> OpenAIEventMappedStream<O>
     where O: Send + 'static
 {
     pub(crate) fn new<M>(event_source: EventSource, event_mapper: M) -> Self
         where M: Fn(eventsource_stream::Event) -> Result<O, OpenAIError> + Send + 'static {
         Self {
             stream: event_source.filter(|result|
                 // filter out the first event which is always Event::Open
                 future::ready(!(result.is_ok() && result.as_ref().unwrap().eq(&Event::Open)))
             ),
+            done: false,
             event_mapper: Box::new(event_mapper),
             _phantom_data: PhantomData,
         }
     }
 }
@@ -574,8 +576,12 @@ impl<O> Stream for OpenAIEventMappedStream<O>
 {
     type Item = Result<O, OpenAIError>;
 
+    // TODO: test this
     fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         let this = self.project();
+        if *this.done {
+            return Poll::Ready(None);
+        }
         let stream: Pin<&mut _> = this.stream;
         match stream.poll_next(cx) {
             Poll::Ready(response) => {
                 match response {
                     None => Poll::Ready(None), // end of the stream
                     Some(result) => match result {
                         Ok(event) => match event {
                             Event::Open => unreachable!(), // it has been filtered out
                             Event::Message(message) => {
                                 if message.data == "[DONE]" {
-                                    Poll::Ready(None) // end of the stream, defined by OpenAI
-                                } else {
-                                    todo!()
+                                    *this.done = true;
+                                }
+                                let response = (this.event_mapper)(message);
+                                match response {
+                                    Ok(output) => Poll::Ready(Some(Ok(output))),
+                                    Err(_) => Poll::Ready(None)
                                 }
                             }
                         }
-                        Err(e) => Poll::Ready(Some(Err(OpenAIError::StreamError(e.to_string()))))
+                        Err(e) => {
+                            *this.done = true;
+                            Poll::Ready(Some(Err(OpenAIError::StreamError(e.to_string()))))
+                        }
                     }
                 }
             }

From eeedb9442fa83fa0e2c1ebe5dd36f657c3d3658a Mon Sep 17 00:00:00 2001
From: "maple@max" 
Date: Wed, 5 Jun 2024 21:41:05 +0800
Subject: [PATCH 3/5] add extra guard on OpenAIEventStream::poll_next

---
 async-openai-wasm/src/client.rs | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/async-openai-wasm/src/client.rs b/async-openai-wasm/src/client.rs
index 9a92cd4..aa3cd7c 100644
--- a/async-openai-wasm/src/client.rs
+++ b/async-openai-wasm/src/client.rs
@@ -493,6 +493,7 @@ pub struct OpenAIEventStream<O: DeserializeOwned + Send + 'static> {
     #[pin]
     stream: Filter<EventSource, future::Ready<bool>, fn(&Result<Event, reqwest_eventsource::Error>) -> future::Ready<bool>>,
+    done: bool,
     _phantom_data: PhantomData<O>,
 }
 
 impl<O: DeserializeOwned + Send + 'static> OpenAIEventStream<O> {
     pub(crate) fn new(event_source: EventSource) -> Self {
         Self {
             stream: event_source.filter(|result|
                 // filter out the first event which is always Event::Open
                 future::ready(!(result.is_ok() && 
result.as_ref().unwrap().eq(&Event::Open)))
             ),
+            done: false,
             _phantom_data: PhantomData,
         }
     }
 }
@@ -513,6 +515,9 @@ impl<O: DeserializeOwned + Send + 'static> Stream for OpenAIEventStream<O> {
 
     fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         let this = self.project();
+        if *this.done {
+            return Poll::Ready(None);
+        }
         let stream: Pin<&mut _> = this.stream;
         match stream.poll_next(cx) {
             Poll::Ready(response) => {
@@ -523,17 +528,24 @@
                     Event::Open => unreachable!(), // it has been filtered out
                     Event::Message(message) => {
                         if message.data == "[DONE]" {
+                            *this.done = true;
                             Poll::Ready(None) // end of the stream, defined by OpenAI
                         } else {
                             // deserialize the data
                             match serde_json::from_str::<O>(&message.data) {
-                                Err(e) => Poll::Ready(Some(Err(map_deserialization_error(e, &message.data.as_bytes())))),
+                                Err(e) => {
+                                    *this.done = true;
+                                    Poll::Ready(Some(Err(map_deserialization_error(e, &message.data.as_bytes()))))
+                                }
                                 Ok(output) => Poll::Ready(Some(Ok(output))),
                             }
                         }
                     }
                 }
-                Err(e) => Poll::Ready(Some(Err(OpenAIError::StreamError(e.to_string()))))
+                Err(e) => {
+                    *this.done = true;
+                    Poll::Ready(Some(Err(OpenAIError::StreamError(e.to_string()))))
+                }
             }
         }
     }

From 8dab6af2bd6ccc1e356e31406b91d3b20a5f50bc Mon Sep 17 00:00:00 2001
From: "maple@max" 
Date: Thu, 6 Jun 2024 00:20:41 +0800
Subject: [PATCH 4/5] update example

---
 .../{openai-web-app => openai-web-app-chat}/.gitignore |  0
 .../{openai-web-app => openai-web-app-chat}/Cargo.toml | 12 +--
 examples/openai-web-app-chat/Dioxus.toml               | 40 ++++++
 examples/openai-web-app-chat/README.md                 | 10 ++
 examples/openai-web-app-chat/src/main.rs               | 68 +++++++++++
 examples/openai-web-app/README.md                      | 11 ---
 examples/openai-web-app/src/main.rs                    | 95 ------------------
 7 files changed, 124 insertions(+), 112 deletions(-)
 rename examples/{openai-web-app => openai-web-app-chat}/.gitignore (100%)
 rename examples/{openai-web-app => openai-web-app-chat}/Cargo.toml (61%)
 create mode 100644 examples/openai-web-app-chat/Dioxus.toml
 create mode 100644 examples/openai-web-app-chat/README.md
 create mode 100644 examples/openai-web-app-chat/src/main.rs
 delete mode 100644 examples/openai-web-app/README.md
 delete mode 100644 examples/openai-web-app/src/main.rs

diff --git a/examples/openai-web-app/.gitignore b/examples/openai-web-app-chat/.gitignore
similarity index 100%
rename from examples/openai-web-app/.gitignore
rename to examples/openai-web-app-chat/.gitignore
diff --git a/examples/openai-web-app/Cargo.toml b/examples/openai-web-app-chat/Cargo.toml
similarity index 61%
rename from examples/openai-web-app/Cargo.toml
rename to examples/openai-web-app-chat/Cargo.toml
index 269966e..c2d7bae 100644
--- a/examples/openai-web-app/Cargo.toml
+++ b/examples/openai-web-app-chat/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "openai-web-app"
+name = "openai-web-app-chat"
 version = "0.1.0"
 edition = "2021"
 publish = false
@@ -7,9 +7,9 @@ publish = false
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-dioxus = "~0.4"
-dioxus-web = "~0.4"
-futures = "0.3.28"
+dioxus = {version = "~0.5", features = ["web"]}
+futures = "0.3.30"
 async-openai-wasm = { path = "../../async-openai-wasm" }
-console_log = "1.0"
-log = "0.4"
\ No newline at end of file
+# Debug
+tracing = "0.1.40"
+dioxus-logger = "~0.5"
\ No newline at end of file
diff --git a/examples/openai-web-app-chat/Dioxus.toml b/examples/openai-web-app-chat/Dioxus.toml
new file mode 100644
index 0000000..243f241
--- /dev/null
+++ b/examples/openai-web-app-chat/Dioxus.toml
@@ -0,0 +1,40 @@
+[application]
+
+# App (Project) Name
+name = 
"openai-web-app-chat-dioxus" + +# Dioxus App Default Platform +# desktop, web +default_platform = "web" + +# `build` & `serve` dist path +out_dir = "dist" + +[web.app] + +# HTML title tag content +title = "openai-web-app-chat-dioxus" + +[web.watcher] + +# when watcher trigger, regenerate the `index.html` +reload_html = true + +# which files or dirs will be watcher monitoring +watch_path = ["src"] + +# include `assets` in web platform +[web.resource] + +# CSS style file + +style = [] + +# Javascript code file +script = [] + +[web.resource.dev] + +# Javascript code file +# serve: [dev-server] only +script = [] diff --git a/examples/openai-web-app-chat/README.md b/examples/openai-web-app-chat/README.md new file mode 100644 index 0000000..f8233cc --- /dev/null +++ b/examples/openai-web-app-chat/README.md @@ -0,0 +1,10 @@ +# OpenAI Web App - Chat + +This builds a `dioxus` web App that uses OpenAI ChatCompletion APIs to generate text. + +To run it, you need: +1. Set OpenAI secrets in `./src/main.rs`. Please do NOT take this demo into production without using a secure secret store +2. Install `dioxus-cli` by `cargo install dioxus-cli`. +3. Run `dx serve` + +Note: Safari may not work due to CORS issues. Please use Chrome or Edge. \ No newline at end of file diff --git a/examples/openai-web-app-chat/src/main.rs b/examples/openai-web-app-chat/src/main.rs new file mode 100644 index 0000000..c9ff70b --- /dev/null +++ b/examples/openai-web-app-chat/src/main.rs @@ -0,0 +1,68 @@ +#![allow(non_snake_case)] + +use dioxus::prelude::*; +use dioxus_logger::tracing::{Level, info, error}; +use futures::stream::StreamExt; +use async_openai_wasm::Client; +use async_openai_wasm::config::OpenAIConfig; +use async_openai_wasm::types::{ChatCompletionRequestMessage, ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs}; + + +const API_BASE: &str = "..."; +const API_KEY: &str = "..."; + +pub fn App() -> Element { + const GREETING: &str = "Hello! How are you?"; + let request = CreateChatCompletionRequestArgs::default() + .max_tokens(512u16) + .model("gpt-3.5-turbo") + .messages([ + ChatCompletionRequestMessage::User( + ChatCompletionRequestUserMessageArgs::default() + .content(GREETING) + .build() + .unwrap() + ) + ]) + .build().unwrap(); + let response_string = use_signal(String::new); + let _fetch_completion_chunks: Coroutine<()> = use_coroutine(|_rx| { + let mut response_string = response_string.to_owned(); + async move { + let config = OpenAIConfig::new() + .with_api_key(API_KEY); + let config = if API_BASE != "..." { + config.with_api_base(API_BASE) + } else { + config + }; + let client = Client::with_config(config); + let mut stream = client.chat().create_stream(request).await.unwrap(); + while let Some(chunk) = stream.next().await { + match chunk { + Ok(response) => + response_string.with_mut(|string| { + if let Some(content) = response.choices[0].delta.content.as_ref() { + info!("Response chunk: {:?}", content); + string.push_str(content); + } + }), + Err(e) => error!("OpenAI Error: {:?}", e) + } + } + } + }); + + rsx! 
{
+        div {
+            p { "Using OpenAI" }
+            p { "User: {GREETING}" }
+            p { "Assistant: {response_string}" }
+        }
+    }
+}
+
+fn main() {
+    dioxus_logger::init(Level::INFO).expect("failed to init logger");
+    launch(App);
+}
\ No newline at end of file
diff --git a/examples/openai-web-app/README.md b/examples/openai-web-app/README.md
deleted file mode 100644
index b038389..0000000
--- a/examples/openai-web-app/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# OpenAI Web App
-
-This builds a `dioxus` web App that uses Azure OpenAI Services or OpenAI to generate text.
-
-To run it, you need:
-1. Set `USE_AZURE` in `./src/main.rs`
-2. Set Azure OpenAI or OpenAI secrets in `./src/main.rs`. Please do NOT take this demo into production without using a secure secret store
-3. Install `dioxus-cli` by `cargo install dioxus-cli --locked`.
-4. Run `dx serve`
-
-Note: Safari may not work due to CORS issues. Please use Chrome or Edge.
\ No newline at end of file
diff --git a/examples/openai-web-app/src/main.rs b/examples/openai-web-app/src/main.rs
deleted file mode 100644
index b0f5a15..0000000
--- a/examples/openai-web-app/src/main.rs
+++ /dev/null
@@ -1,95 +0,0 @@
-use dioxus::prelude::*;
-use futures::stream::StreamExt;
-use log::Level;
-
-use async_openai_wasm::types::{ChatCompletionRequestMessage, ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs};
-
-const USE_AZURE: bool = false;
-
-const API_BASE: &str = "...";
-const API_KEY: &str = "...";
-const API_VERSION: &str = "...";
-const DEPLOYMENT_ID: &str = "...";
-
-pub fn app(cx: Scope) -> Element {
-    const GREETING: &str = "Hello! How are you?";
-    let request = CreateChatCompletionRequestArgs::default()
-        .max_tokens(512u16)
-        .model("gpt-3.5-turbo-0613")
-        .messages([
-            ChatCompletionRequestMessage::User(
-                ChatCompletionRequestUserMessageArgs::default()
-                    .content(GREETING)
-                    .build()
-                    .unwrap()
-            )
-        ])
-        .build().unwrap();
-    let response_string: &UseRef<String> = use_ref(cx, String::new);
-    let _fetch_completion_chunks: &Coroutine<()> = use_coroutine(cx, |_rx| {
-        let response_string = response_string.to_owned();
-        async move {
-            let mut stream = if USE_AZURE {
-                let config = async_openai_wasm::config::AzureConfig::new()
-                    .with_api_base(API_BASE)
-                    .with_api_key(API_KEY)
-                    .with_api_version(API_VERSION)
-                    .with_deployment_id(DEPLOYMENT_ID);
-                let client = async_openai_wasm::Client::with_config(config);
-                client.chat().create_stream(request).await.unwrap()
-            } else {
-                let config = async_openai_wasm::config::OpenAIConfig::new()
-                    .with_api_key(API_KEY);
-                let config = if API_BASE != "..." {
-                    config.with_api_base(API_BASE)
-                } else {
-                    config
-                };
-                let client = async_openai_wasm::Client::with_config(config);
-                client.chat().create_stream(request).await.unwrap()
-            };
-            while let Some(chunk) = stream.next().await {
-                match chunk {
-                    Ok(response) => {
-                        if response.choices.is_empty() {
-                            // azure openai service returns empty response on first call
-                            continue;
-                        }
-                        response_string.with_mut(|string| {
-                            if let Some(content) = response.choices[0].delta.content.as_ref() {
-                                string.push_str(content);
-                            }
-                        })
-                    }
-                    Err(e) => {
-                        log::error!("OpenAI Error: {:?}", e);
-                    }
-                }
-            }
-        }
-    });
-
-    render! 
{
-        div {
-            p {
-                if USE_AZURE {
-                    "Using Azure OpenAI"
-                } else {
-                    "Using OpenAI"
-                }
-            }
-            p {
-                "User: {GREETING}"
-            }
-            p {
-                "Assistant: {response_string.read()}"
-            }
-        }
-    }
-}
-
-
-fn main() {
-    console_log::init_with_level(Level::Debug).unwrap();
-    dioxus_web::launch(app);
-}
\ No newline at end of file

From c64d80b6daabbe171ef8347bd3309395765892fe Mon Sep 17 00:00:00 2001
From: "maple@max" 
Date: Tue, 11 Jun 2024 22:41:10 +0800
Subject: [PATCH 5/5] add Assistant example

---
 async-openai-wasm/src/client.rs               |   1 -
 examples/openai-web-app-assistant/.gitignore  |   1 +
 examples/openai-web-app-assistant/Cargo.toml  |  16 ++
 examples/openai-web-app-assistant/Dioxus.toml |  40 +++++
 examples/openai-web-app-assistant/README.md   |  14 ++
 examples/openai-web-app-assistant/src/main.rs |  98 ++++++++++++
 .../openai-web-app-assistant/src/utils.rs     | 148 ++++++++++++++++++
 7 files changed, 317 insertions(+), 1 deletion(-)
 create mode 100644 examples/openai-web-app-assistant/.gitignore
 create mode 100644 examples/openai-web-app-assistant/Cargo.toml
 create mode 100644 examples/openai-web-app-assistant/Dioxus.toml
 create mode 100644 examples/openai-web-app-assistant/README.md
 create mode 100644 examples/openai-web-app-assistant/src/main.rs
 create mode 100644 examples/openai-web-app-assistant/src/utils.rs

diff --git a/async-openai-wasm/src/client.rs b/async-openai-wasm/src/client.rs
index aa3cd7c..3842f02 100644
--- a/async-openai-wasm/src/client.rs
+++ b/async-openai-wasm/src/client.rs
@@ -588,7 +588,6 @@ impl<O> Stream for OpenAIEventMappedStream<O>
 {
     type Item = Result<O, OpenAIError>;
 
-    // TODO: test this
     fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         let this = self.project();
         if *this.done {
diff --git a/examples/openai-web-app-assistant/.gitignore b/examples/openai-web-app-assistant/.gitignore
new file mode 100644
index 0000000..3e22129
--- /dev/null
+++ b/examples/openai-web-app-assistant/.gitignore
@@ -0,0 +1 @@
+/dist
\ No newline at end of file
diff --git a/examples/openai-web-app-assistant/Cargo.toml b/examples/openai-web-app-assistant/Cargo.toml
new file mode 100644
index 0000000..b83e8f2
--- /dev/null
+++ b/examples/openai-web-app-assistant/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "openai-web-assistant-chat"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+dioxus = {version = "~0.5", features = ["web"]}
+futures = "0.3.30"
+async-openai-wasm = { path = "../../async-openai-wasm" }
+# Debug
+tracing = "0.1.40"
+dioxus-logger = "~0.5"
+serde_json = "1.0.117"
\ No newline at end of file
diff --git a/examples/openai-web-app-assistant/Dioxus.toml b/examples/openai-web-app-assistant/Dioxus.toml
new file mode 100644
index 0000000..39bc33f
--- /dev/null
+++ b/examples/openai-web-app-assistant/Dioxus.toml
@@ -0,0 +1,40 @@
+[application]
+
+# App (Project) Name
+name = "openai-web-app-assistant-dioxus"
+
+# Dioxus App Default Platform
+# desktop, web
+default_platform = "web"
+
+# `build` & `serve` dist path
+out_dir = "dist"
+
+[web.app]
+
+# HTML title tag content
+title = "openai-web-app-assistant-dioxus"
+
+[web.watcher]
+
+# when watcher trigger, regenerate the `index.html`
+reload_html = true
+
+# which files or dirs will be watcher monitoring
+watch_path = ["src"]
+
+# include `assets` in web platform
+[web.resource]
+
+# CSS style file
+
+style = []
+
+# Javascript code file
+script = []
+
+[web.resource.dev]
+
+# Javascript code file
+# serve: [dev-server] only
+script = 
[]
diff --git a/examples/openai-web-app-assistant/README.md b/examples/openai-web-app-assistant/README.md
new file mode 100644
index 0000000..74996e0
--- /dev/null
+++ b/examples/openai-web-app-assistant/README.md
@@ -0,0 +1,14 @@
+# OpenAI Web App - Assistant
+
+This builds a `dioxus` web app that uses the OpenAI Assistants API to generate text.
+
+To run it, you need to:
+1. Set OpenAI secrets in `./src/main.rs`. Please do NOT take this demo into production without using a secure secret store.
+2. Install `dioxus-cli` with `cargo install dioxus-cli`.
+3. Run `dx serve`.
+
+Note: Safari may not work due to CORS issues. Please use Chrome or Edge.
+
+## Reference
+
+The code is adapted from the [assistants-func-call-stream example in async-openai](https://github.com/64bit/async-openai/tree/main/examples/assistants-func-call-stream).
\ No newline at end of file
diff --git a/examples/openai-web-app-assistant/src/main.rs b/examples/openai-web-app-assistant/src/main.rs
new file mode 100644
index 0000000..bfe5999
--- /dev/null
+++ b/examples/openai-web-app-assistant/src/main.rs
@@ -0,0 +1,98 @@
+#![allow(non_snake_case)]
+
+use dioxus::prelude::*;
+use dioxus_logger::tracing::{error, info, Level};
+use futures::stream::StreamExt;
+
+use async_openai_wasm::types::{AssistantStreamEvent, CreateMessageRequest, CreateRunRequest, CreateThreadRequest, MessageRole};
+
+use crate::utils::*;
+
+mod utils;
+
+pub const API_BASE: &str = "...";
+pub const API_KEY: &str = "...";
+
+
+pub fn App() -> Element {
+    const QUERY: &str = "What's the weather in San Francisco today and the likelihood it'll rain?";
+    let reply = use_signal(String::new);
+    let _run_assistant: Coroutine<()> = use_coroutine(|_rx| {
+        let client = get_client();
+        async move {
+            //
+            // Step 1: Define functions
+            //
+            let assistant = client
+                .assistants()
+                .create(create_assistant_request())
+                .await
+                .expect("failed to create assistant");
+            //
+            // Step 2: Create a Thread and add Messages
+            //
+            let thread = client
+                .threads()
+                .create(CreateThreadRequest::default())
+                .await
+                .expect("failed to create thread");
+            let _message = client
+                .threads()
+                .messages(&thread.id)
+                .create(CreateMessageRequest {
+                    role: MessageRole::User,
+                    content: QUERY.into(),
+                    ..Default::default()
+                })
+                .await
+                .expect("failed to create message");
+            //
+            // Step 3: Initiate a Run
+            //
+            let mut event_stream = client
+                .threads()
+                .runs(&thread.id)
+                .create_stream(CreateRunRequest {
+                    assistant_id: assistant.id.clone(),
+                    stream: Some(true),
+                    ..Default::default()
+                })
+                .await
+                .expect("failed to create run");
+
+
+            while let Some(event) = event_stream.next().await {
+                match event {
+                    Ok(event) => match event {
+                        AssistantStreamEvent::ThreadRunRequiresAction(run_object) => {
+                            info!("thread.run.requires_action: run_id:{}", run_object.id);
+                            handle_requires_action(&client, run_object, reply.to_owned()).await
+                        }
+                        _ => info!("\nEvent: {event:?}\n"),
+                    },
+                    Err(e) => {
+                        error!("Error: {e}");
+                    }
+                }
+            }
+
+            client.threads().delete(&thread.id).await.expect("failed to delete thread");
+            client.assistants().delete(&assistant.id).await.expect("failed to delete assistant");
+            info!("Done!");
+        }
+    });
+
+    rsx! 
{
+        div {
+            p { "Using OpenAI" }
+            p { "User: {QUERY}" }
+            p { "Expected Stats (Debug): temperature = {TEMPERATURE}, rain_probability = {RAIN_PROBABILITY}" }
+            p { "Assistant: {reply}" }
+        }
+    }
+}
+
+fn main() {
+    dioxus_logger::init(Level::INFO).expect("failed to init logger");
+    launch(App);
+}
\ No newline at end of file
diff --git a/examples/openai-web-app-assistant/src/utils.rs b/examples/openai-web-app-assistant/src/utils.rs
new file mode 100644
index 0000000..0d8a039
--- /dev/null
+++ b/examples/openai-web-app-assistant/src/utils.rs
@@ -0,0 +1,148 @@
+use std::error::Error;
+use dioxus::prelude::Signal;
+use futures::StreamExt;
+use tracing::{error, info};
+use async_openai_wasm::Client;
+use async_openai_wasm::config::OpenAIConfig;
+use async_openai_wasm::types::{AssistantStreamEvent, CreateAssistantRequest, CreateAssistantRequestArgs, FunctionObject, MessageDeltaContent, RunObject, SubmitToolOutputsRunRequest, ToolsOutputs};
+use crate::{API_BASE, API_KEY};
+use dioxus::prelude::*;
+
+
+pub const TEMPERATURE: &str = "57";
+pub const RAIN_PROBABILITY: &str = "0.06";
+
+pub fn get_client() -> Client<OpenAIConfig> {
+    let config = OpenAIConfig::new()
+        .with_api_key(API_KEY);
+    let config = if API_BASE != "..." {
+        config.with_api_base(API_BASE)
+    } else {
+        config
+    };
+
+    Client::with_config(config)
+}
+
+pub async fn handle_requires_action(client: &Client<OpenAIConfig>, run_object: RunObject, reply_signal: Signal<String>) {
+    let mut tool_outputs: Vec<ToolsOutputs> = vec![];
+    if let Some(ref required_action) = run_object.required_action {
+        for tool in &required_action.submit_tool_outputs.tool_calls {
+            if tool.function.name == "get_current_temperature" {
+                info!("get_current_temperature");
+                tool_outputs.push(ToolsOutputs {
+                    tool_call_id: Some(tool.id.clone()),
+                    output: Some(TEMPERATURE.into()),
+                })
+            } else if tool.function.name == "get_rain_probability" {
+                info!("get_rain_probability");
+                tool_outputs.push(ToolsOutputs {
+                    tool_call_id: Some(tool.id.clone()),
+                    output: Some(RAIN_PROBABILITY.into()),
+                })
+            } else {
+                error!("Unknown tool: {}", tool.function.name);
+                unreachable!();
+            }
+        }
+
+        if let Err(e) = submit_tool_outputs(client, run_object, tool_outputs, reply_signal).await {
+            error!("Error on submitting tool outputs: {e}");
+        }
+    }
+}
+
+pub async fn submit_tool_outputs(
+    client: &Client<OpenAIConfig>,
+    run_object: RunObject,
+    tool_outputs: Vec<ToolsOutputs>,
+    mut reply_signal: Signal<String>,
+) -> Result<(), Box<dyn Error>> {
+    let mut event_stream = client
+        .threads()
+        .runs(&run_object.thread_id)
+        .submit_tool_outputs_stream(
+            &run_object.id,
+            SubmitToolOutputsRunRequest {
+                tool_outputs,
+                stream: Some(true),
+            },
+        )
+        .await?;
+
+    while let Some(event) = event_stream.next().await {
+        match event {
+            Ok(event) => {
+                if let AssistantStreamEvent::ThreadMessageDelta(delta) = event {
+                    if let Some(contents) = delta.delta.content {
+                        for content in contents {
+                            // only text is expected here and no images
+                            if let MessageDeltaContent::Text(text) = content {
+                                if let Some(text) = text.text {
+                                    if let Some(text) = text.value {
+                                        info!("After submitted tool results: {}", text);
+                                        reply_signal.with_mut(|reply| {
+                                            reply.push_str(&text);
+                                        });
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+            Err(e) => {
+                error!("Error: {e}");
+            }
+        }
+    }
+
+    Ok(())
+}
+
+pub fn create_assistant_request() -> CreateAssistantRequest {
+    CreateAssistantRequestArgs::default()
+        .instructions("You are a weather bot. 
Use the provided functions to answer questions.") + .model("gpt-4o") + .tools(vec![ + FunctionObject { + name: "get_current_temperature".into(), + description: Some("Get the current temperature for a specific location".into()), + parameters: Some(serde_json::json!( + { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g., San Francisco, CA" + }, + "unit": { + "type": "string", + "enum": ["Celsius", "Fahrenheit"], + "description": "The temperature unit to use. Infer this from the user's location." + } + }, + "required": ["location", "unit"] + } + )), + }.into(), + FunctionObject { + name: "get_rain_probability".into(), + description: Some("Get the probability of rain for a specific location".into()), + parameters: Some(serde_json::json!( + { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g., San Francisco, CA" + } + }, + "required": ["location"] + } + )), + }.into(), + ]) + .build() + .expect("failed to build CreateAssistantRequestArgs") +} \ No newline at end of file
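Editor's addendum, not part of the patch series: taken together, patches 1-3 replace the boxed `Pin<Box<dyn Stream<Item = ...> + Send>>` return types with the concrete `OpenAIEventStream<O>` and `OpenAIEventMappedStream<O>` types, which matters on wasm targets where futures are generally not `Send`. The sketch below shows how a caller consumes the resulting `AssistantEventStream` (that is, `OpenAIEventMappedStream<AssistantStreamEvent>`). It is a minimal sketch only: it assumes an existing assistant and thread, reuses the examples' "..." placeholder for the API key, prints instead of updating UI state, and the helper name `print_run_events` is hypothetical.

// Minimal sketch, assuming an existing assistant + thread.
// `print_run_events` is a hypothetical helper, not part of the patches.
use async_openai_wasm::{config::OpenAIConfig, Client};
use async_openai_wasm::types::{AssistantStreamEvent, CreateRunRequest};
use futures::StreamExt;

async fn print_run_events(thread_id: &str, assistant_id: &str) {
    let client = Client::with_config(OpenAIConfig::new().with_api_key("..."));
    // `create_stream` now returns the concrete AssistantEventStream
    // (OpenAIEventMappedStream<AssistantStreamEvent>) instead of a boxed
    // `dyn Stream + Send`, so the stream can be named and polled directly.
    let mut events = client
        .threads()
        .runs(thread_id)
        .create_stream(CreateRunRequest {
            assistant_id: assistant_id.into(),
            stream: Some(true),
            ..Default::default()
        })
        .await
        .expect("failed to create run");
    while let Some(event) = events.next().await {
        match event {
            // The terminal "[DONE]" message should arrive as the Done variant;
            // the `done` flag from patches 2-3 then ends polling on its own,
            // so this explicit break is just belt and braces.
            Ok(AssistantStreamEvent::Done(_)) => break,
            Ok(event) => println!("event: {event:?}"),
            Err(e) => eprintln!("stream error: {e}"),
        }
    }
}

Because the stream is now a named type, it can also be stored in a struct field or returned from a helper without boxing, which is the practical payoff of dropping the `Pin<Box<dyn ...>>` signatures.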