diff --git a/holmes/core/conversations.py b/holmes/core/conversations.py
index 7a684fe..65bf32b 100644
--- a/holmes/core/conversations.py
+++ b/holmes/core/conversations.py
@@ -330,8 +330,6 @@ def build_chat_messages(
                 "content": ask,
             },
         ]
-        print("MESSAGES BEFORE SENDING")
-        print(messages)
         return messages
 
     if global_instructions and global_instructions.instructions and len(global_instructions.instructions[0]) > 0:
@@ -357,6 +355,4 @@ def build_chat_messages(
         ai, conversation_history_without_tools, number_of_tools
     )
     truncate_tool_messages(conversation_history, tool_size)
-    print("CONVERSATION_HISTORY BEFORE SENDING")
-    print(conversation_history)
     return conversation_history
diff --git a/holmes/core/investigation.py b/holmes/core/investigation.py
index d34504e..efb3b0f 100644
--- a/holmes/core/investigation.py
+++ b/holmes/core/investigation.py
@@ -18,7 +18,7 @@ def investigate_issues(investigate_request: InvestigateRequest, dal: SupabaseDal
         "alert", investigate_request.context.get("issue_type")
     )
     global_instructions = dal.get_global_instructions_for_account()
-    print(f"GLOBAL INSTRUCTIONS: {global_instructions}")
+
     raw_data = investigate_request.model_dump()
     if context:
         raw_data["extra_context"] = context
diff --git a/holmes/core/supabase_dal.py b/holmes/core/supabase_dal.py
index 75aa5f9..38b5492 100644
--- a/holmes/core/supabase_dal.py
+++ b/holmes/core/supabase_dal.py
@@ -195,8 +195,8 @@ def get_resource_instructions(self, type: str, name: Optional[str]) -> Optional[
         return None
 
     def get_global_instructions_for_account(self) -> Optional[Intructions]:
-        print(self.account_id)
-        res = (
+        try:
+            res = (
             self.client
             .table(RUNBOOKS_TABLE)
             .select("runbook")
@@ -205,11 +205,12 @@ def get_global_instructions_for_account(self) -> Optional[Intructions]:
             .eq("account_id", self.account_id)
             .execute()
         )
-        if res.data:
-            instructions = res.data[0].get("runbook").get("instructions")
-            print(Intructions(instructions=instructions))
-            return Intructions(instructions=instructions)
-
+            if res.data:
+                instructions = res.data[0].get("runbook").get("instructions")
+                return Intructions(instructions=instructions)
+        except Exception:
+            logging.exception("Failed to fetch global instructions")
+        return None
 
 
     def create_session_token(self) -> str:
diff --git a/holmes/core/tool_calling_llm.py b/holmes/core/tool_calling_llm.py
index 817808f..252a62b 100644
--- a/holmes/core/tool_calling_llm.py
+++ b/holmes/core/tool_calling_llm.py
@@ -370,7 +370,7 @@ def investigate(
             user_prompt += f"\n\nGlobal Instructions (use only if relevant): {global_instructions.instructions[0]}\n"
 
         user_prompt = f"{user_prompt}\n This is context from the issue {issue.raw}"
-        print(user_prompt)
+
         logging.debug(
             "Rendered system prompt:\n%s", textwrap.indent(system_prompt, "    ")
         )
diff --git a/server.py b/server.py
index 64d5f35..57f38d8 100644
--- a/server.py
+++ b/server.py
@@ -173,7 +173,7 @@ def issue_conversation(issue_chat_request: IssueChatRequest):
     load_robusta_api_key(dal=dal, config=config)
     ai = config.create_toolcalling_llm(console, dal=dal)
     global_instructions = dal.get_global_instructions_for_account()
-    print(f"GLOBAL INSTRUCTION: {global_instructions}")
+
     messages = build_issue_chat_messages(issue_chat_request, ai, global_instructions)
 
     llm_call = ai.messages_call(messages=messages)
@@ -193,7 +193,7 @@ def chat(chat_request: ChatRequest):
     ai = config.create_toolcalling_llm(console, dal=dal)
 
     global_instructions = dal.get_global_instructions_for_account()
-    print(f"GLOBAL INSTRUCTION: {global_instructions}")
+
     messages = build_chat_messages(
         chat_request.ask, chat_request.conversation_history, ai=ai, global_instructions=global_instructions
     )