fix: openai error messages (#1436)
mikbry authored Dec 20, 2024
1 parent 43eac59 commit 06d9929
Showing 2 changed files with 22 additions and 10 deletions.
24 changes: 19 additions & 5 deletions webapp/native/src/providers/openai.rs
@@ -36,6 +36,20 @@ use crate::{
 
 use super::llm::{ LlmImageGenerationResponse, LlmModelsResponse };
 
+#[serde_with::skip_serializing_none]
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct OpenAIError {
+    pub message: String,
+    #[serde(rename = "type")]
+    pub error_type: String,
+}
+
+#[serde_with::skip_serializing_none]
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct OpenAIErrorResponse {
+    pub error: OpenAIError,
+}
+
 #[serde_with::skip_serializing_none]
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct OpenAIBodyCompletion {
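
The two new structs mirror OpenAI's documented error envelope, { "error": { "message": ..., "type": ... } }. Extra fields the API may send, such as param and code, are dropped by serde's default behavior since the structs do not declare them. A minimal sketch of how a typical 401 body deserializes; the trimmed-down struct copies and the sample payload below are illustrative, not part of the commit:

use serde::Deserialize;

// Local copies of the new structs (the real ones also derive Clone and
// Serialize and use #[serde_with::skip_serializing_none]).
#[derive(Debug, Deserialize)]
struct OpenAIError {
    message: String,
    #[serde(rename = "type")]
    error_type: String,
}

#[derive(Debug, Deserialize)]
struct OpenAIErrorResponse {
    error: OpenAIError,
}

fn main() {
    // Typical OpenAI error body for a bad API key; `param` and `code`
    // are ignored because the structs do not declare them.
    let body = r#"{
        "error": {
            "message": "Incorrect API key provided.",
            "type": "invalid_request_error",
            "param": null,
            "code": "invalid_api_key"
        }
    }"#;
    let parsed: OpenAIErrorResponse = serde_json::from_str(body).unwrap();
    assert_eq!(parsed.error.error_type, "invalid_request_error");
    println!("{}", parsed.error.message);
}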
@@ -187,7 +201,7 @@ impl OpenAIChatCompletion {
             total_tokens: 0,
         },
         // TODO implement usage
-        // see: https://community.openai.com/t/why-there-is-no-usage-object-returned-with-streaming-api-call/385160/15
+        // see: https://community.openai.com/t/usage-stats-now-available-when-using-streaming-with-the-chat-completions-api-or-completions-api/738156/3
     }
 }
 
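The updated link points to OpenAI's announcement that usage stats are now available when streaming, via the stream_options request parameter. Resolving the TODO would presumably mean wiring that parameter into the completion body; the sketch below only mirrors OpenAI's documented API, and neither StreamOptions nor a stream_options field exists in this codebase:

use serde::Serialize;

// Illustrative only: OpenAI's documented stream_options object.
#[derive(Serialize)]
struct StreamOptions {
    include_usage: bool,
}

#[derive(Serialize)]
struct CompletionBodySketch {
    model: String,
    stream: bool,
    // When include_usage is true, the API appends one final chunk whose
    // usage field is populated and whose choices array is empty.
    #[serde(skip_serializing_if = "Option::is_none")]
    stream_options: Option<StreamOptions>,
}

fn main() {
    let body = CompletionBodySketch {
        model: "gpt-4o-mini".to_string(),
        stream: true,
        stream_options: Some(StreamOptions { include_usage: true }),
    };
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}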
@@ -292,15 +306,15 @@ async fn request<R: Runtime>(
     };
     let status = response.status();
     if !status.is_success() {
-        let error = match response.json::<LlmResponseError>().await {
+        let error = match response.json::<OpenAIErrorResponse>().await {
             Ok(t) => t,
             Err(error) => {
                 println!("Failed to deserialize error response: {}", error);
                 return Err(Box::new(error));
             }
         };
         println!("Failed to get response: {} {:?}", status, error);
-        return Err(Box::new(error.error));
+        return Err(Box::new(LlmError::new(&error.error.message, status.as_str())));
     }
     let response = match response.json::<OpenAIChatCompletion>().await {
         Ok(r) => r,
@@ -374,7 +388,7 @@ async fn stream_request<R: Runtime>(
     };
     let status = response.status();
     if !status.is_success() {
-        let error = match response.json::<LlmResponseError>().await {
+        let error = match response.json::<OpenAIErrorResponse>().await {
             Ok(t) => t,
             Err(error) => {
                 let message = format!("Failed to deserialize error response: {}", error);
@@ -396,7 +410,7 @@
             }
             None => (),
         }
-        return Err(Box::new(error.error));
+        return Err(Box::new(LlmError::new(&error.error.message, status.as_str())));
     }
     let mut stream = response.bytes_stream().eventsource();
     let mut chunks: Vec<OpenAIChatCompletionChunk> = vec![];
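Both request and stream_request now forward only the provider's human-readable message plus the HTTP status through LlmError::new, instead of boxing the raw OpenAIError. LlmError itself is defined elsewhere in the crate; the stand-in below reproduces just the surface the diff relies on, to illustrate what the webapp ends up receiving:

use std::error::Error;
use std::fmt;

// Stand-in for the crate's LlmError; only the shape used by the diff
// (LlmError::new(message, code), boxed as dyn Error) is reproduced.
#[derive(Debug)]
struct LlmError {
    message: String,
    code: String,
}

impl LlmError {
    fn new(message: &str, code: &str) -> Self {
        LlmError { message: message.to_string(), code: code.to_string() }
    }
}

impl fmt::Display for LlmError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{} ({})", self.message, self.code)
    }
}

impl Error for LlmError {}

fn main() {
    // Mirrors the new error path: the status comes from reqwest's
    // StatusCode::as_str(), the message from the parsed OpenAIErrorResponse.
    let err: Box<dyn Error> = Box::new(LlmError::new("Incorrect API key provided.", "401"));
    // The webapp receives this as a string; the TypeScript change below
    // strips stray quotes from it before showing a toast.
    println!("{}", err);
}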
8 changes: 3 additions & 5 deletions webapp/utils/messages/index.ts
@@ -122,11 +122,9 @@ export const sendMessage = async (
       onSuccess(response.usage);
       returnedMessage.content = response.content.trim();
     } */
-  } catch (e: any) {
+  } catch (e: unknown) {
     logger.error('sendMessage', e, typeof e, activeService.provider?.errors);
-    const error = String(e);
-    /* onError(conversation.id, error);
-    setErrorMessage({ ...errorMessage, [conversation.id]: error }); */
+    const error = String(e).replaceAll('"', '');
     onError(conversation.id, error);
     returnedMessage.content = error;
     returnedMessage.status = MessageStatus.Error;
@@ -142,7 +140,7 @@
       setProviders(updatedProviders);
     }
 
-    toast.error(String(e));
+    toast.error(error);
   }
   if (returnedMessage.status !== MessageStatus.Error) {
     returnedMessage.status = MessageStatus.Delivered;
