From 06d99299741d43c9c28dac3b3f4d2881f78a0252 Mon Sep 17 00:00:00 2001
From: Mik
Date: Fri, 20 Dec 2024 13:21:26 +0100
Subject: [PATCH] fix: openai error messages (#1436)

---
 webapp/native/src/providers/openai.rs | 24 +++++++++++++++++++-----
 webapp/utils/messages/index.ts        |  8 +++-----
 2 files changed, 22 insertions(+), 10 deletions(-)

diff --git a/webapp/native/src/providers/openai.rs b/webapp/native/src/providers/openai.rs
index 479a4291..6e4cd692 100644
--- a/webapp/native/src/providers/openai.rs
+++ b/webapp/native/src/providers/openai.rs
@@ -36,6 +36,20 @@ use crate::{
 use super::llm::{ LlmImageGenerationResponse, LlmModelsResponse };
 
+#[serde_with::skip_serializing_none]
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct OpenAIError {
+    pub message: String,
+    #[serde(rename = "type")]
+    pub error_type: String,
+}
+
+#[serde_with::skip_serializing_none]
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct OpenAIErrorResponse {
+    pub error: OpenAIError,
+}
+
 #[serde_with::skip_serializing_none]
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct OpenAIBodyCompletion {
@@ -187,7 +201,7 @@ impl OpenAIChatCompletion {
                 total_tokens: 0,
             },
             // TODO implement usage
-            // see : https://community.openai.com/t/why-there-is-no-usage-object-returned-with-streaming-api-call/385160/15
+            // see :https://community.openai.com/t/usage-stats-now-available-when-using-streaming-with-the-chat-completions-api-or-completions-api/738156/3
         }
     }
 
@@ -292,7 +306,7 @@ async fn request(
     };
     let status = response.status();
     if !status.is_success() {
-        let error = match response.json::<LlmResponseError>().await {
+        let error = match response.json::<OpenAIErrorResponse>().await {
             Ok(t) => t,
             Err(error) => {
                 println!("Failed to dezerialize error response: {}", error);
             }
         };
         println!("Failed to get response: {} {:?}", status, error);
-        return Err(Box::new(error.error));
+        return Err(Box::new(LlmError::new(&error.error.message, status.as_str())));
     }
     let response = match response.json::<OpenAIChatCompletion>().await {
         Ok(r) => r,
@@ -374,7 +388,7 @@ async fn stream_request(
     };
     let status = response.status();
     if !status.is_success() {
-        let error = match response.json::<LlmResponseError>().await {
+        let error = match response.json::<OpenAIErrorResponse>().await {
             Ok(t) => t,
             Err(error) => {
                 let message = format!("Failed to deserialize error response: {}", error);
             }
             None => (),
         }
-        return Err(Box::new(error.error));
+        return Err(Box::new(LlmError::new(&error.error.message, status.as_str())));
     }
     let mut stream = response.bytes_stream().eventsource();
     let mut chunks: Vec<String> = vec![];
diff --git a/webapp/utils/messages/index.ts b/webapp/utils/messages/index.ts
index 9f29e232..4ffa02fe 100644
--- a/webapp/utils/messages/index.ts
+++ b/webapp/utils/messages/index.ts
@@ -122,11 +122,9 @@ export const sendMessage = async (
       onSuccess(response.usage);
       returnedMessage.content = response.content.trim();
     } */
-  } catch (e: any) {
+  } catch (e: unknown) {
     logger.error('sendMessage', e, typeof e, activeService.provider?.errors);
-    const error = String(e);
-    /* onError(conversation.id, error);
-    setErrorMessage({ ...errorMessage, [conversation.id]: error }); */
+    const error = String(e).replaceAll('"', '');
     onError(conversation.id, error);
     returnedMessage.content = error;
     returnedMessage.status = MessageStatus.Error;
@@ -142,7 +140,7 @@
       setProviders(updatedProviders);
     }
 
-    toast.error(String(e));
+    toast.error(error);
   }
   if (returnedMessage.status !== MessageStatus.Error) {
     returnedMessage.status = MessageStatus.Delivered;
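
The new structs target the JSON envelope OpenAI returns on failed requests, so the user-visible
message can be surfaced instead of a generic deserialization failure. Below is a minimal
standalone sketch of that parsing path, not part of the patch itself: serde and serde_json are
assumed as dependencies, the serde_with attribute from the patch is omitted, and the sample
payload and the 401 status are illustrative values, not taken from the diff.

use serde::{ Deserialize, Serialize };

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OpenAIError {
    pub message: String,
    #[serde(rename = "type")]
    pub error_type: String,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OpenAIErrorResponse {
    pub error: OpenAIError,
}

fn main() -> Result<(), serde_json::Error> {
    // Typical OpenAI error body; extra fields such as "param" and "code"
    // are simply ignored by serde's default behavior.
    let body = r#"{
        "error": {
            "message": "Incorrect API key provided.",
            "type": "invalid_request_error"
        }
    }"#;
    let parsed: OpenAIErrorResponse = serde_json::from_str(body)?;
    // The patch forwards the parsed message together with the HTTP status
    // (LlmError::new(&parsed.error.message, status)), so the chat UI shows
    // OpenAI's own message rather than a serde error string.
    println!("{} ({})", parsed.error.message, "401");
    Ok(())
}

Passing only the message plus the HTTP status through LlmError keeps the frontend change small:
the webapp continues to render a single error string, now cleaned of stray quotes by replaceAll.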