From 1b82993ddb14ef9555044c2107095f0592bbd493 Mon Sep 17 00:00:00 2001
From: jekalmin
Date: Tue, 12 Nov 2024 20:18:49 +0900
Subject: [PATCH] [no-issue] change default model (gpt-3.5-turbo-1106 ->
 gpt-4o-mini)

---
 README.md                                               | 4 ++--
 custom_components/extended_openai_conversation/const.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 50fc68f..334e426 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ Derived from [OpenAI Conversation](https://www.home-assistant.io/integrations/op
 ## How it works
 Extended OpenAI Conversation uses OpenAI API's feature of [function calling](https://platform.openai.com/docs/guides/function-calling) to call service of Home Assistant.
 
-Since "gpt-3.5-turbo" model already knows how to call service of Home Assistant in general, you just have to let model know what devices you have by [exposing entities](https://github.com/jekalmin/extended_openai_conversation#preparation)
+Since OpenAI models already know how to call service of Home Assistant in general, you just have to let model know what devices you have by [exposing entities](https://github.com/jekalmin/extended_openai_conversation#preparation)
 
 ## Installation
 1. Install via registering as a custom repository of HACS or by copying `extended_openai_conversation` folder into `/custom_components`
@@ -515,7 +515,7 @@ When using [ytube_music_player](https://github.com/KoljaWindeler/ytube_music_pla
 #### 7-1. Let model generate a query
 - Without examples, a query tries to fetch data only from "states" table like below
   > Question: When did bedroom light turn on?
-  Query(generated by gpt-3.5): SELECT * FROM states WHERE entity_id = 'input_boolean.livingroom_light_2' AND state = 'on' ORDER BY last_changed DESC LIMIT 1
+  Query(generated by gpt): SELECT * FROM states WHERE entity_id = 'input_boolean.livingroom_light_2' AND state = 'on' ORDER BY last_changed DESC LIMIT 1
 - Since "entity_id" is stored in "states_meta" table, we need to give examples of question and query.
 - Not secured, but flexible way
 
diff --git a/custom_components/extended_openai_conversation/const.py b/custom_components/extended_openai_conversation/const.py
index 45b6b5f..68405d1 100644
--- a/custom_components/extended_openai_conversation/const.py
+++ b/custom_components/extended_openai_conversation/const.py
@@ -32,7 +32,7 @@ Do not restate or appreciate what user says, rather make a quick inquiry.
 """
 
 CONF_CHAT_MODEL = "chat_model"
-DEFAULT_CHAT_MODEL = "gpt-3.5-turbo-1106"
+DEFAULT_CHAT_MODEL = "gpt-4o-mini"
 CONF_MAX_TOKENS = "max_tokens"
 DEFAULT_MAX_TOKENS = 150
 CONF_TOP_P = "top_p"
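
Note (not part of the patch itself): a minimal sketch of how a default-model constant like DEFAULT_CHAT_MODEL is typically consumed, assuming the integration reads the chat model from a config entry's options and falls back to the constant when no option is set. The `resolve_chat_model` helper and the plain `options` dict below are illustrative stand-ins, not code from this repository.

```python
# Illustrative sketch only: the usual "user option overrides constant default"
# pattern for a setting like DEFAULT_CHAT_MODEL. The helper and the plain dict
# standing in for config-entry options are hypothetical, not repository code.

CONF_CHAT_MODEL = "chat_model"
DEFAULT_CHAT_MODEL = "gpt-4o-mini"


def resolve_chat_model(options: dict) -> str:
    """Return the user-configured chat model, or the default if none is set."""
    return options.get(CONF_CHAT_MODEL, DEFAULT_CHAT_MODEL)


if __name__ == "__main__":
    # An install that explicitly pinned a model keeps it:
    print(resolve_chat_model({"chat_model": "gpt-3.5-turbo-1106"}))  # gpt-3.5-turbo-1106
    # A fresh install with no option set picks up the new default:
    print(resolve_chat_model({}))  # gpt-4o-mini
```

Under this reading, only installs that never set a chat model would see behavior change from this patch; explicitly configured models continue to take precedence over the new default.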