diff --git a/libs/superagent/app/agents/llm.py b/libs/superagent/app/agents/llm.py
index fcb19ce16..bc2a518fd 100644
--- a/libs/superagent/app/agents/llm.py
+++ b/libs/superagent/app/agents/llm.py
@@ -10,6 +10,7 @@
 from app.utils.prisma import prisma
 from prisma.enums import AgentType, LLMProvider
 from prisma.models import Agent
+from prompts.function_calling_agent import FUNCTION_CALLING_AGENT_PROMPT
 from prompts.json import JSON_FORMAT_INSTRUCTIONS
 
 logger = logging.getLogger(__name__)
@@ -58,6 +59,7 @@ async def init(self):
         # E.g parent agent metadata have a model key, and if we pass it to ChatOpenAI
         # It will give us an error, because we will try to use parent Agent's LLM model.
         self.agent_config.metadata = {}
+        self.agent_config.prompt = FUNCTION_CALLING_AGENT_PROMPT
 
         await self._set_llm()
         await self._set_tools_return_direct()
@@ -137,7 +139,9 @@ async def ainvoke(self, input, *_, **kwargs):
             input=input
         )
 
-        if function_calling_res.get("output"):
+        if function_calling_res.get("output") and function_calling_res.get(
+            "intermediate_steps"
+        ):
             INPUT_TEMPLATE = "{input}\n Context: {context}\n"
             input = INPUT_TEMPLATE.format(
                 input=input, context=function_calling_res.get("output")
diff --git a/libs/superagent/prompts/function_calling_agent.py b/libs/superagent/prompts/function_calling_agent.py
new file mode 100644
index 000000000..5d7ba3350
--- /dev/null
+++ b/libs/superagent/prompts/function_calling_agent.py
@@ -0,0 +1,4 @@
+FUNCTION_CALLING_AGENT_PROMPT = """
+    Your job is to call available functions if needed to answer the user's question.
+    You should simply call the functions. If available functions do not help you to answer the question, just return 'None'.
+"""
diff --git a/libs/ui/app/integrations/llm.tsx b/libs/ui/app/integrations/llm.tsx
index c7fc9a296..8f2bf5ba3 100644
--- a/libs/ui/app/integrations/llm.tsx
+++ b/libs/ui/app/integrations/llm.tsx
@@ -103,6 +103,7 @@ export default function LLM({
     },
   })
 
+  console.log("form", form.formState.errors)
   async function onSubmit(values: z.infer) {
     const payload = {
       ...values,