Skip to content

Commit

Permalink
Merge pull request #963 from superagent-ai/fix/function-calling-agent
Browse files Browse the repository at this point in the history
feat: improve the function-calling agent and its prompt
  • Loading branch information
elisalimli authored Apr 15, 2024
2 parents ca574f3 + 06549c3 commit 7945869
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 1 deletion.
6 changes: 5 additions & 1 deletion libs/superagent/app/agents/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from app.utils.prisma import prisma
from prisma.enums import AgentType, LLMProvider
from prisma.models import Agent
from prompts.function_calling_agent import FUNCTION_CALLING_AGENT_PROMPT
from prompts.json import JSON_FORMAT_INSTRUCTIONS

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -58,6 +59,7 @@ async def init(self):
# E.g parent agent metadata have a model key, and if we pass it to ChatOpenAI
# It will give us an error, because we will try to use parent Agent's LLM model.
self.agent_config.metadata = {}
self.agent_config.prompt = FUNCTION_CALLING_AGENT_PROMPT

await self._set_llm()
await self._set_tools_return_direct()
Expand Down Expand Up @@ -137,7 +139,9 @@ async def ainvoke(self, input, *_, **kwargs):
input=input
)

if function_calling_res.get("output"):
if function_calling_res.get("output") and function_calling_res.get(
"intermediate_steps"
):
INPUT_TEMPLATE = "{input}\n Context: {context}\n"
input = INPUT_TEMPLATE.format(
input=input, context=function_calling_res.get("output")
Expand Down
4 changes: 4 additions & 0 deletions libs/superagent/prompts/function_calling_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# System prompt installed on the intermediate function-calling agent
# (assigned to `agent_config.prompt` in app/agents/llm.py). It instructs
# the model to invoke available tools when helpful and to return the
# literal string 'None' when no tool applies — callers appear to rely on
# that sentinel to decide whether tool output should be folded into the
# final answer's context (NOTE(review): confirm against `ainvoke`).
# Do not reword casually: the exact wording is runtime behavior.
FUNCTION_CALLING_AGENT_PROMPT = """
Your job is to call available functions if needed to answer the user's question.
You should simply call the functions. If available functions do not help you to answer the question, just return 'None'.
"""
1 change: 1 addition & 0 deletions libs/ui/app/integrations/llm.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,7 @@ export default function LLM({
},
})

console.log("form", form.formState.errors)
async function onSubmit(values: z.infer<typeof formSchema>) {
const payload = {
...values,
Expand Down

0 comments on commit 7945869

Please sign in to comment.