From 1f212f4587297c2c81bc690190a1e85241f1da5d Mon Sep 17 00:00:00 2001
From: Fedor
Date: Wed, 8 May 2024 08:19:37 +0100
Subject: [PATCH] add integration test

---
 .../chat_models/test_bedrock.py               | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/libs/aws/tests/integration_tests/chat_models/test_bedrock.py b/libs/aws/tests/integration_tests/chat_models/test_bedrock.py
index 31f00caa..e7ba4ef2 100644
--- a/libs/aws/tests/integration_tests/chat_models/test_bedrock.py
+++ b/libs/aws/tests/integration_tests/chat_models/test_bedrock.py
@@ -1,4 +1,5 @@
 """Test Bedrock chat model."""
+
 from typing import Any, cast
 
 import pytest
@@ -73,6 +74,22 @@ def test_chat_bedrock_streaming() -> None:
     assert isinstance(response, BaseMessage)
 
 
+@pytest.mark.scheduled
+def test_chat_bedrock_streaming_llama3() -> None:
+    """Test that streaming correctly invokes on_llm_new_token callback."""
+    callback_handler = FakeCallbackHandler()
+    chat = ChatBedrock(  # type: ignore[call-arg]
+        model_id="meta.llama3-8b-instruct-v1:0",
+        streaming=True,
+        callbacks=[callback_handler],
+        verbose=True,
+    )
+    message = HumanMessage(content="Hello")
+    response = chat([message])
+    assert callback_handler.llm_streams > 0
+    assert isinstance(response, BaseMessage)
+
+
 @pytest.mark.scheduled
 def test_chat_bedrock_streaming_generation_info() -> None:
     """Test that generation info is preserved when streaming."""
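
Note on exercising the new test outside the suite: the snippet below is a minimal standalone sketch of what test_chat_bedrock_streaming_llama3 checks. It assumes valid AWS credentials with access to the meta.llama3-8b-instruct-v1:0 model in Amazon Bedrock; the TokenCounter handler is a hypothetical stand-in for the suite's FakeCallbackHandler fixture and is not part of this patch.

    from typing import Any

    from langchain_aws import ChatBedrock
    from langchain_core.callbacks import BaseCallbackHandler
    from langchain_core.messages import HumanMessage


    class TokenCounter(BaseCallbackHandler):
        """Count how many times on_llm_new_token fires while streaming."""

        def __init__(self) -> None:
            self.llm_streams = 0

        def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
            self.llm_streams += 1


    handler = TokenCounter()
    chat = ChatBedrock(  # hypothetical standalone usage mirroring the test
        model_id="meta.llama3-8b-instruct-v1:0",
        streaming=True,
        callbacks=[handler],
    )
    response = chat.invoke([HumanMessage(content="Hello")])
    # With streaming=True the callback should fire at least once per token.
    print(handler.llm_streams, response.content)

As in the test, the property of interest is that handler.llm_streams ends up greater than zero, i.e. that streaming actually routed tokens through on_llm_new_token instead of returning only a single final message.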