Commit

fmt
baskaryan committed Oct 29, 2024
1 parent 1a196a8 commit f5a49f2
Showing 2 changed files with 19 additions and 12 deletions.
libs/genai/langchain_google_genai/_genai_extension.py (2 changes: 1 addition & 1 deletion)
@@ -238,7 +238,7 @@ def _prepare_config(
     client_info: Optional[gapic_v1.client_info.ClientInfo] = None,
     transport: Optional[str] = None,
 ) -> Dict[str, Any]:
-    formatted_client_options = {"api_endpoint": _config.api_endpoint}
+    formatted_client_options: dict = {"api_endpoint": _config.api_endpoint}
     if client_options:
         formatted_client_options.update(**client_options)
     if not credentials and api_key:
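The only substantive change in this file is the explicit `dict` annotation on `formatted_client_options`. A plausible rationale, not stated anywhere in the commit, is type-checker friendliness: from the dict literal alone, mypy infers a narrow value type, so later inserting a value of a different type (credentials, retry objects, and the like) gets rejected, whereas a bare `dict` annotation means `dict[Any, Any]`. A minimal standalone sketch of that effect (hypothetical values, not the library's code):

    narrow = {"api_endpoint": "localhost:8080"}  # mypy infers dict[str, str]
    # narrow["credentials"] = object()  # mypy error: expected "str"

    loose: dict = {"api_endpoint": "localhost:8080"}  # dict[Any, Any]
    loose["credentials"] = object()  # accepted
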
libs/genai/langchain_google_genai/chat_models.py (29 changes: 18 additions & 11 deletions)
@@ -610,12 +610,17 @@ def _response_to_result(
         ]
         message = _parse_response_candidate(candidate, streaming=stream)
         message.usage_metadata = lc_usage
-        generations.append(
-            (ChatGenerationChunk if stream else ChatGeneration)(
-                message=message,
-                generation_info=generation_info,
-            )
-        )
+        if stream:
+            generations.append(
+                ChatGenerationChunk(
+                    message=cast(AIMessageChunk, message),
+                    generation_info=generation_info,
+                )
+            )
+        else:
+            generations.append(
+                ChatGeneration(message=message, generation_info=generation_info)
+            )
     if not response.candidates:
         # Likely a "prompt feedback" violation (e.g., toxic input)
         # Raising an error would be different than how OpenAI handles it,
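This hunk unrolls a conditional-class instantiation, `(ChatGenerationChunk if stream else ChatGeneration)(...)`, into two explicit branches and adds `cast(AIMessageChunk, message)` on the streaming path. The commit does not say why, but the usual motivation for this rewrite (an assumption here) is static typing: mypy cannot check a call whose callee is a conditional expression over two classes, and `ChatGenerationChunk` expects a message chunk while `message` is presumably annotated as `AIMessage`. A minimal sketch of the before/after pattern, assuming only langchain-core's public classes:

    from typing import Union, cast

    from langchain_core.messages import AIMessage, AIMessageChunk
    from langchain_core.outputs import ChatGeneration, ChatGenerationChunk

    def build(
        message: AIMessage, stream: bool
    ) -> Union[ChatGeneration, ChatGenerationChunk]:
        # Old style: the callee's type is the join of two classes, so mypy
        # cannot pick a single __init__ signature to check the call against:
        #   return (ChatGenerationChunk if stream else ChatGeneration)(
        #       message=message
        #   )
        # New style: one plain, checkable constructor call per branch.
        if stream:
            # At runtime the streaming parser hands back a chunk; the cast
            # only informs the type checker.
            return ChatGenerationChunk(message=cast(AIMessageChunk, message))
        return ChatGeneration(message=message)

For example, `build(AIMessageChunk(content="hi"), stream=True)` yields a `ChatGenerationChunk`, exactly as the original one-liner did.
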
@@ -624,12 +629,14 @@
             "Gemini produced an empty response. Continuing with empty message\n"
             f"Feedback: {response.prompt_feedback}"
         )
-        generations = [
-            (ChatGenerationChunk if stream else ChatGeneration)(
-                message=(AIMessageChunk if stream else AIMessage)(content=""),
-                generation_info={},
-            )
-        ]
+        if stream:
+            generations = [
+                ChatGenerationChunk(
+                    message=AIMessageChunk(content=""), generation_info={}
+                )
+            ]
+        else:
+            generations = [ChatGeneration(message=AIMessage(""), generation_info={})]
     return ChatResult(generations=generations, llm_output=llm_output)
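
The second hunk is the same unrolling applied to the empty-response fallback; the objects constructed are identical to before. The only stylistic wrinkle is that the non-streaming branch now passes the empty content positionally, which is equivalent because `BaseMessage.__init__` accepts `content` as its first positional parameter:

    from langchain_core.messages import AIMessage

    # Positional and keyword forms build equal messages.
    assert AIMessage("") == AIMessage(content="")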

