Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Toolkits/issue949 generate openai tool schema #1070

Open
wants to merge 14 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 10 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions camel/toolkits/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
OpenAIFunction,
get_openai_function_schema,
get_openai_tool_schema,
generate_docstring,
)
from .open_api_specs.security_config import openapi_security_config

Expand Down Expand Up @@ -46,6 +47,7 @@
'OpenAIFunction',
'get_openai_function_schema',
'get_openai_tool_schema',
"generate_docstring",
'openapi_security_config',
'GithubToolkit',
'MathToolkit',
Expand Down
184 changes: 183 additions & 1 deletion camel/toolkits/function_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
import warnings
from inspect import Parameter, signature
from inspect import Parameter, getsource, signature
from typing import Any, Callable, Dict, Mapping, Optional, Tuple

from docstring_parser import parse
Expand All @@ -21,6 +21,12 @@
from pydantic import create_model
from pydantic.fields import FieldInfo

from camel.agents import ChatAgent
from camel.configs import ChatGPTConfig
from camel.messages import BaseMessage
from camel.models import ModelFactory
from camel.models.base_model import BaseModelBackend
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved
from camel.types import ModelPlatformType, ModelType
from camel.utils import get_pydantic_object_schema, to_pascal


Expand Down Expand Up @@ -143,6 +149,74 @@ def _create_mol(name, field):
return openai_tool_schema


def generate_docstring(
    code: str,
    model: BaseModelBackend,
) -> str:
    """Generate a docstring for the given function source code using an LLM.

    Args:
        code (str): The source code of the function.
        model (BaseModelBackend): The model used for generating the docstring.

    Returns:
        str: The generated docstring.
    """
    # Prompt steering the model toward PEP 8 / PEP 257 compliant docstrings.
    docstring_prompt = '''
    **Role**: Generate professional Python docstrings conforming to
    PEP 8/PEP 257.

    **Requirements**:
    - Use appropriate format: reST, Google, or NumPy, as needed.
    - Include parameters, return values, and exceptions.
    - Reference any existing docstring in the function and
    retain useful information.

    **Input**: Python function.

    **Output**: Docstring content (plain text, no code markers).

    **Example:**

    Input:
    ```python
    def add(a: int, b: int) -> int:
        return a + b
    ```

    Output:
    Adds two numbers.
    Args:
        a (int): The first number.
        b (int): The second number.

    Returns:
        int: The sum of the two numbers.

    **Task**: Generate a docstring for the function below.

    '''
    # The request carries the instruction prompt followed by the raw source.
    docstring_request = BaseMessage.make_user_message(
        role_name="User",
        content=docstring_prompt + code,
    )

    # Spin up a single-purpose assistant agent for this one generation task.
    system_msg = BaseMessage.make_assistant_message(
        role_name="Assistant",
        content="You are a helpful assistant.",
    )
    docstring_agent = ChatAgent(system_msg, model=model, token_limit=4096)

    # Return the plain-text docstring produced by the model.
    return docstring_agent.step(docstring_request).msg.content


class FunctionTool:
r"""An abstraction of a function that OpenAI chat models can call. See
https://platform.openai.com/docs/api-reference/chat/create.
Expand All @@ -156,18 +230,46 @@ class FunctionTool:
openai_tool_schema (Optional[Dict[str, Any]], optional): A user-defined
openai tool schema to override the default result.
(default: :obj:`None`)
schema_assistant (Optional[BaseModelBackend], optional): An assistant
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved
(e.g., an LLM model) used to generate the schema if no valid
schema is provided and use_schema_assistant is enabled.
(default: :obj:`None`)
use_schema_assistant (bool, optional): Whether to enable the use of
the schema_assistant to automatically generate the schema if
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved
validation fails or no valid schema is provided.
(default: :obj:`False`)
"""

def __init__(
self,
func: Callable,
openai_tool_schema: Optional[Dict[str, Any]] = None,
schema_assistant: Optional[BaseModelBackend] = None,
use_schema_assistant: Optional[bool] = False,
) -> None:
self.func = func
self.openai_tool_schema = openai_tool_schema or get_openai_tool_schema(
func
)

if use_schema_assistant:
try:
self.validate_openai_tool_schema(self.openai_tool_schema)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

openai tool call actually support descriptions as optional. I remove it from the validation: #1085. So maybe we should remove the try and except and generate the schema directly if use_schema_assistant

except Exception:
print(
f"Warning: No valid schema found for "
f"{self.func.__name__}. "
f"Attempting to generate one using LLM."
)
schema = self.generate_openai_tool_schema(schema_assistant)
if schema:
self.openai_tool_schema = schema
else:
raise ValueError(
f"Failed to generate valid schema for "
f"{self.func.__name__}"
)

@staticmethod
def validate_openai_tool_schema(
openai_tool_schema: Dict[str, Any],
Expand Down Expand Up @@ -362,6 +464,86 @@ def set_parameter(self, param_name: str, value: Dict[str, Any]):
param_name
] = value

def generate_openai_tool_schema(
self,
schema_assistant: Optional[BaseModelBackend] = None,
) -> Dict[str, Any]:
r"""Generates an OpenAI tool schema for the specified function.

This method generates the OpenAI tool schema using the provided
LLM assistant. If no assistant is provided, it defaults
to creating a GPT_4O_MINI model. The function's source code is used
to generate a docstring and schema, which are validated before
returning the final schema. If schema generation or validation fails,
the process retries up to two times.

Args:
schema_assistant (Optional[BaseModelBackend]): An optional
assistant model to use for schema generation. If not provided, a
GPT_4O_MINI model will be created.

Returns:
Dict[str, Any]: The generated OpenAI tool schema for the function.

Raises:
ValueError: If schema generation or validation fails after the
maximum number of retries, a ValueError is raised,
prompting manual schema setting.
"""
if not schema_assistant:
print(
"Warning: No model provided. "
"Use GPT_4O_MINI to generate the schema."
)
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved
try:
schema_assistant = ModelFactory.create(
model_platform=ModelPlatformType.OPENAI,
model_type=ModelType.GPT_4O_MINI,
model_config_dict=ChatGPTConfig(temperature=1.0).as_dict(),
)
except Exception as e:
raise ValueError(
f"Failed to generate the OpenAI tool schema for "
f"the function {self.func.__name__}. "
f"Please set the OpenAI tool schema manually."
) from e
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved

function_string = getsource(self.func)
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved

max_retries = 2
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved
retries = 0

# Retry loop to handle schema generation and validation
while retries < max_retries:
try:
# Generate the docstring and the schema
docstring = generate_docstring(
function_string, schema_assistant
)
self.func.__doc__ = docstring
schema = get_openai_tool_schema(self.func)

# Validate the schema
self.validate_openai_tool_schema(schema)

print(
f"Successfully generated the OpenAI tool schema for "
f"the function {self.func.__name__}."
)
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved
return schema

except Exception as e:
retries += 1
if retries == max_retries:
raise ValueError(
f"Failed to generate the OpenAI tool Schema. "
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved
f"Please set the OpenAI tool schema for "
f"function {self.func.__name__} manually."
) from e
print("Schema validation failed. Retrying...")
Zhangzeyu97 marked this conversation as resolved.
Show resolved Hide resolved

return {}

@property
def parameters(self) -> Dict[str, Any]:
r"""Getter method for the property :obj:`parameters`.
Expand Down
87 changes: 87 additions & 0 deletions examples/tool_call/generate_openai_tool_schema_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========

import os

from camel.agents import ChatAgent
from camel.configs.openai_config import ChatGPTConfig
from camel.messages import BaseMessage
from camel.models import ModelFactory
from camel.toolkits import FunctionTool
from camel.types import ModelPlatformType, ModelType

# The example requires a valid OpenAI API key in the environment.
api_key = os.getenv("OPENAI_API_KEY")
if not api_key:
    raise ValueError("API key not found in environment variables.")


# Define a function that doesn't have a docstring, so the schema
# assistant must generate one for it.
def get_perfect_square(n: int) -> int:
    return n**2


# Build the model backend used both for schema generation and for the agent.
openai_model = ModelFactory.create(
    model_platform=ModelPlatformType.OPENAI,
    model_type=ModelType.GPT_4O_MINI,
    model_config_dict=ChatGPTConfig(temperature=1.0).as_dict(),
)

# Wrap the function in a FunctionTool, letting the assistant fill in
# the missing schema.
square_tool = FunctionTool(
    get_perfect_square, schema_assistant=openai_model, use_schema_assistant=True
)
print("\nGenerated OpenAI Tool Schema:")
print(square_tool.get_openai_tool_schema())

# Create a ChatAgent equipped with the tool.
sys_msg = BaseMessage.make_assistant_message(
    role_name="Assistant", content="You are a helpful assistant."
)
camel_agent = ChatAgent(
    system_message=sys_msg, model=openai_model, tools=[square_tool]
)
camel_agent.reset()

# Ask a question that should trigger the tool call.
user_msg = BaseMessage.make_user_message(
    role_name="User", content="What is the perfect square of 2024?"
)

response = camel_agent.step(user_msg)
print("\nAssistant Response:")
print(response.msg.content)

# Expected transcript of a run of this example:
print("""
===============================================================================
Warning: No model provided. Use GPT_4O_MINI to generate the schema for
the function get_perfect_square. Attempting to generate one using LLM.
Successfully generated the OpenAI tool schema for
the function get_perfect_square.

Generated OpenAI Tool Schema:
{'type': 'function', 'function': {'name': 'get_perfect_square',
'description': 'Calculates the perfect square of a given integer.',
'parameters': {'properties': {'n': {'type': 'integer',
'description': 'The integer to be squared.'}}, 'required': ['n'],
'type': 'object'}}}

[FunctionCallingRecord(func_name='get_perfect_square', args={'n': 2024},
result={'result': 4096576})]
===============================================================================
""")
Loading
Loading