From 7dfefbc88a4d6c181d2a21e178739fad58e2f43c Mon Sep 17 00:00:00 2001
From: Fedor
Date: Tue, 30 Apr 2024 16:03:52 +0100
Subject: [PATCH] Remove extraneous <|begin_of_text|> tokens as only one is
 needed

---
 libs/aws/langchain_aws/chat_models/bedrock.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/libs/aws/langchain_aws/chat_models/bedrock.py b/libs/aws/langchain_aws/chat_models/bedrock.py
index a5e59b4e..922c71c2 100644
--- a/libs/aws/langchain_aws/chat_models/bedrock.py
+++ b/libs/aws/langchain_aws/chat_models/bedrock.py
@@ -50,22 +50,22 @@ def convert_messages_to_prompt_llama(messages: List[BaseMessage]) -> str:
 def _convert_one_message_to_text_llama3(message: BaseMessage) -> str:
     if isinstance(message, ChatMessage):
         message_text = (
-            f"<|begin_of_text|><|start_header_id|>{message.role}"
+            f"<|start_header_id|>{message.role}"
             f"<|end_header_id|>{message.content}<|eot_id|>"
         )
     elif isinstance(message, HumanMessage):
         message_text = (
-            f"<|begin_of_text|><|start_header_id|>user"
+            f"<|start_header_id|>user"
             f"<|end_header_id|>{message.content}<|eot_id|>"
         )
     elif isinstance(message, AIMessage):
         message_text = (
-            f"<|begin_of_text|><|start_header_id|>assistant"
+            f"<|start_header_id|>assistant"
             f"<|end_header_id|>{message.content}<|eot_id|>"
         )
     elif isinstance(message, SystemMessage):
         message_text = (
-            f"<|begin_of_text|><|start_header_id|>system"
+            f"<|start_header_id|>system"
             f"<|end_header_id|>{message.content}<|eot_id|>"
         )
     else:
@@ -78,7 +78,8 @@ def convert_messages_to_prompt_llama3(messages: List[BaseMessage]) -> str:
     """Convert a list of messages to a prompt for llama."""
 
     return "\n".join(
-        [_convert_one_message_to_text_llama3(message) for message in messages]
+        ["<|begin_of_text|>"]
+        + [_convert_one_message_to_text_llama3(message) for message in messages]
         + ["<|start_header_id|>assistant<|end_header_id|>\n\n"]
     )
 