From 75d270fb93166c21ea33d36985f819346e4eefda Mon Sep 17 00:00:00 2001
From: yym68686
Date: Mon, 22 Apr 2024 12:26:09 +0800
Subject: [PATCH] =?UTF-8?q?=F0=9F=90=9B=20Bug:=20Fixed=20the=20bug=20where?=
 =?UTF-8?q?=20groq=20cannot=20switch=20models.?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

✨ Feature: Add support for the llama3-70b-8192 model of groq.
💻 Code: Upgrade tiktoken version to 0.6.0
---
 bot.py               |  5 +++--
 config.py            |  7 ++-----
 requirements.txt     |  2 +-
 utils/chatgpt2api.py | 17 ++++++++++-------
 4 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/bot.py b/bot.py
index ef8a146e..a23fdfa0 100644
--- a/bot.py
+++ b/bot.py
@@ -124,6 +124,7 @@ async def command_bot(update, context, language=None, prompt=translator_prompt,
                 }
             }
         )
+        # print("robot", robot)
         await context.bot.send_chat_action(chat_id=chatid, action=ChatAction.TYPING)
         await getChatGPT(update, context, title, robot, message, chatid, messageid)
     else:
@@ -170,11 +171,10 @@ async def getChatGPT(update, context, title, robot, message, chatid, messageid):
             reply_to_message_id=messageid,
         )
         messageid = message.message_id
-    get_answer = robot.ask_stream
     pass_history = config.PASS_HISTORY
 
     try:
-        for data in get_answer(text, convo_id=str(chatid), pass_history=pass_history):
+        for data in robot.ask_stream(text, convo_id=str(chatid), pass_history=pass_history):
             if "🌐" not in data:
                 result = result + data
             tmpresult = result
@@ -310,6 +310,7 @@ async def button_press(update, context):
     data = callback_query.data
     if "gpt-" in data or "claude" in data or "mixtral" in data or "llama" in data or "gemini" in data or (config.CUSTOM_MODELS and data in config.CUSTOM_MODELS):
         config.GPT_ENGINE = data
+        # print("config.GPT_ENGINE", config.GPT_ENGINE)
         if (config.API and "gpt-" in data) or (config.API and not config.ClaudeAPI) or (config.API and config.CUSTOM_MODELS and data in config.CUSTOM_MODELS):
             config.ChatGPTbot = GPT(api_key=f"{config.API}", engine=config.GPT_ENGINE, system_prompt=config.systemprompt, temperature=config.temperature)
             config.ChatGPTbot.reset(convo_id=str(update.effective_chat.id), system_prompt=config.systemprompt)
diff --git a/config.py b/config.py
index 3231f785..d1cb8e6b 100644
--- a/config.py
+++ b/config.py
@@ -37,10 +37,7 @@ from utils.chatgpt2api import Chatbot as GPT
 from utils.chatgpt2api import Imagebot, claudebot, groqbot, claude3bot, gemini_bot
 
 if API:
-    try:
-        ChatGPTbot = GPT(api_key=f"{API}", engine=GPT_ENGINE, system_prompt=systemprompt, temperature=temperature)
-    except:
-        ChatGPTbot = GPT(api_key=f"{API}", engine="gpt-3.5-turbo-1106", system_prompt=systemprompt, temperature=temperature)
+    ChatGPTbot = GPT(api_key=f"{API}", engine=GPT_ENGINE, system_prompt=systemprompt, temperature=temperature)
 
     translate_bot = GPT(api_key=f"{API}", engine=GPT_ENGINE, system_prompt=systemprompt, temperature=temperature)
     copilot_bot = GPT(api_key=f"{API}", engine=GPT_ENGINE, system_prompt=prompt.search_system_prompt.format(LANGUAGE), temperature=temperature)
@@ -150,7 +147,7 @@ def create_buttons(strings):
 if GROQ_API_KEY:
     initial_model.extend([
         "mixtral-8x7b-32768",
-        "llama2-70b-4096",
+        "llama3-70b-8192",
     ])
 if GOOGLE_AI_API_KEY:
     initial_model.extend([
diff --git a/requirements.txt b/requirements.txt
index 485faf71..8f850c52 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
 --index-url https://pypi.python.org/simple/
-tiktoken
 requests
+tiktoken==0.6.0
 md2tgmd==0.1.2
 # jieba
 python-dotenv
diff --git a/utils/chatgpt2api.py b/utils/chatgpt2api.py
index 4da93343..4f150c4e 100644
--- a/utils/chatgpt2api.py
+++ b/utils/chatgpt2api.py
@@ -56,6 +56,7 @@ def get_filtered_keys_from_object(obj: object, *keys: str) -> Set[str]:
     "gpt-4-turbo-2024-04-09",
     "mixtral-8x7b-32768",
     "llama2-70b-4096",
+    "llama3-70b-8192",
     "claude-2.1",
     "claude-3-sonnet-20240229",
     "claude-3-haiku-20240307",
@@ -177,7 +178,7 @@ def ask_stream(
         }
 
         json_post = {
-            "model": os.environ.get("MODEL_NAME") or model or self.engine,
+            "model": model or self.engine,
             "prompt": self.conversation.Conversation(convo_id) if pass_history else f"\n\nHuman:{prompt}\n\nAssistant:",
             "stream": True,
             "temperature": kwargs.get("temperature", self.temperature),
@@ -335,7 +336,7 @@ def ask_stream(
         }
 
         json_post = {
-            "model": os.environ.get("MODEL_NAME") or model or self.engine,
+            "model": model or self.engine,
             "messages": self.conversation[convo_id] if pass_history else [{
                 "role": "user",
                 "content": prompt
@@ -711,7 +712,7 @@ def get_post_body(
         **kwargs,
     ):
         json_post_body = {
-            "model": os.environ.get("MODEL_NAME") or model or self.engine,
+            "model": model or self.engine,
             "messages": self.conversation[convo_id] if pass_history else [{"role": "system","content": self.system_prompt},{"role": role, "content": prompt}],
             "max_tokens": 5000,
             "stream": True,
@@ -1110,7 +1111,7 @@ class groqbot:
     def __init__(
         self,
         api_key: str,
-        engine: str = os.environ.get("GPT_ENGINE") or "mixtral-8x7b-32768",
+        engine: str = os.environ.get("GPT_ENGINE") or "llama3-70b-8192",
         temperature: float = 0.5,
         top_p: float = 1,
         chat_url: str = "https://api.groq.com/openai/v1/chat/completions",
@@ -1221,13 +1222,15 @@ def ask_stream(
                 "role": "user",
                 "content": prompt
             }],
-            "model": os.environ.get("GPT_ENGINE") or model or self.engine,
+            "model": model or self.engine,
             "temperature": kwargs.get("temperature", self.temperature),
             "max_tokens": model_max_tokens,
             "top_p": kwargs.get("top_p", self.top_p),
             "stop": None,
             "stream": True,
         }
+        # print("json_post", json_post)
+        # print(os.environ.get("GPT_ENGINE"), model, self.engine)
 
         try:
             response = self.session.post(
@@ -1288,7 +1291,7 @@ class gemini_bot:
     def __init__(
         self,
         api_key: str,
-        engine: str = os.environ.get("GPT_ENGINE") or "claude-3-opus-20240229",
+        engine: str = os.environ.get("GPT_ENGINE") or "gemini-1.5-pro-latest",
         temperature: float = 0.5,
         top_p: float = 0.7,
         chat_url: str = "https://generativelanguage.googleapis.com/v1beta/models/{model}:{stream}?key={api_key}",
@@ -1417,7 +1420,7 @@ def ask_stream(
         }
         print(json.dumps(json_post, indent=4, ensure_ascii=False))
 
-        url = self.chat_url.format(model=os.environ.get("MODEL_NAME") or model or self.engine, stream="streamGenerateContent", api_key=self.api_key)
+        url = self.chat_url.format(model=model or self.engine, stream="streamGenerateContent", api_key=self.api_key)
 
         try:
             response = self.session.post(
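
Note on the model-switch fix (illustrative sketch, not part of the patch): before this change, the request bodies resolved the model as os.environ.get("GPT_ENGINE") or model or self.engine, so the GPT_ENGINE environment variable read at startup always won and switching models at runtime via the button menu (which updates config.GPT_ENGINE and the bot's engine) had no visible effect for groq. The patch drops the env-var lookup so the per-call model argument, then the bot instance's engine, decide. The minimal Python sketch below contrasts the two resolution orders; FakeGroqBot is a hypothetical, simplified stand-in and not the repository's groqbot class.

import os

class FakeGroqBot:
    """Simplified stand-in for groqbot, used only to contrast model resolution."""
    def __init__(self, engine):
        self.engine = engine

    def resolve_model_old(self, model=None):
        # Pre-patch order: the GPT_ENGINE env var always wins,
        # so a runtime change to self.engine is silently ignored.
        return os.environ.get("GPT_ENGINE") or model or self.engine

    def resolve_model_new(self, model=None):
        # Post-patch order: explicit per-call model first, then the instance engine.
        return model or self.engine

os.environ["GPT_ENGINE"] = "mixtral-8x7b-32768"  # value fixed at startup
bot = FakeGroqBot(engine="mixtral-8x7b-32768")

bot.engine = "llama3-70b-8192"                   # user switches model from the button menu
print(bot.resolve_model_old())                   # mixtral-8x7b-32768 -> bug: switch ignored
print(bot.resolve_model_new())                   # llama3-70b-8192    -> fix: switch respected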