camel
camel copied to clipboard
[BUG] Certain toolkits return a dictionary/list of dicts and not a callable object
Required prerequisites
- [x] I have read the documentation https://camel-ai.github.io/camel/camel.html.
- [x] I have searched the Issue Tracker and Discussions that this hasn't already been reported. (+1 or comment there if it has.)
- [ ] Consider asking first in a Discussion.
What version of camel are you using?
0.2.33
System information
3.11.9 (tags/v3.11.9:de54cf5, Apr 2 2024, 10:12:12) [MSC v.1938 64 bit (AMD64)] win32 0.2.33
Problem description
Certain toolkits return a dictionary/list of dicts rather than a callable object, which causes `ChatAgent` initialization to fail.
Reproducible example code
You can reproduce this by running this snippet. I'm happy to raise a fix for this unless I'm doing something wrong.
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType
from camel.configs import ChatGPTConfig
from camel.messages import BaseMessage
from camel.agents import ChatAgent
from camel.toolkits import RedditToolkit, ArxivToolkit, SearchToolkit, PubMedToolkit, FunctionTool
model = ModelFactory.create(
model_platform=ModelPlatformType.OPENAI,
model_type=ModelType.GPT_3_5_TURBO,
model_config_dict=ChatGPTConfig().as_dict(),
)
tools = [
RedditToolkit().collect_top_posts(subreddit_name = 'LocalLLaMA')
]
# Define an assistant message
system_msg = "You are a helpful assistant."
# Initialize the agent
agent = ChatAgent(system_msg, model = model, tools = tools)
Traceback
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[13], line 31
28 system_msg = "You are a helpful assistant."
30 # Initialize the agent
---> 31 agent = ChatAgent(system_msg, model = model, tools = tools)
File E:\open-source\camel stuff\camel\camel\agents\chat_agent.py:210, in ChatAgent.__init__(self, system_message, model, memory, message_window_size, token_limit, output_language, tools, external_tools, response_terminators, scheduling_strategy, single_iteration)
202 self.role_type: RoleType = (
203 getattr(self.system_message, "role_type", None)
204 or RoleType.ASSISTANT
205 )
207 # Set up tools
208 self._internal_tools = {
209 tool.get_function_name(): tool
--> 210 for tool in [
211 convert_to_function_tool(tool) for tool in (tools or [])
212 ]
213 }
215 self._external_tool_schemas = {
216 tool_schema["function"]["name"]: tool_schema
217 for tool_schema in [
218 convert_to_schema(tool) for tool in (external_tools or [])
219 ]
220 }
222 # Set up other properties
File E:\open-source\camel stuff\camel\camel\agents\chat_agent.py:211, in <listcomp>(.0)
202 self.role_type: RoleType = (
203 getattr(self.system_message, "role_type", None)
204 or RoleType.ASSISTANT
205 )
207 # Set up tools
208 self._internal_tools = {
209 tool.get_function_name(): tool
210 for tool in [
--> 211 convert_to_function_tool(tool) for tool in (tools or [])
212 ]
213 }
215 self._external_tool_schemas = {
216 tool_schema["function"]["name"]: tool_schema
217 for tool_schema in [
218 convert_to_schema(tool) for tool in (external_tools or [])
219 ]
220 }
222 # Set up other properties
File E:\open-source\camel stuff\camel\camel\agents\_utils.py:118, in convert_to_function_tool(tool)
114 def convert_to_function_tool(
115 tool: Union[FunctionTool, Callable],
116 ) -> FunctionTool:
117 r"""Convert a tool to a FunctionTool from Callable."""
--> 118 return tool if isinstance(tool, FunctionTool) else FunctionTool(tool)
File E:\open-source\camel stuff\camel\camel\toolkits\function_tool.py:335, in FunctionTool.__init__(self, func, openai_tool_schema, synthesize_schema, synthesize_schema_model, synthesize_schema_max_retries, synthesize_output, synthesize_output_model, synthesize_output_format)
323 def __init__(
324 self,
325 func: Callable,
(...) 332 synthesize_output_format: Optional[Type[BaseModel]] = None,
333 ) -> None:
334 self.func = func
--> 335 self.openai_tool_schema = openai_tool_schema or get_openai_tool_schema(
336 func
337 )
338 self.synthesize_output = synthesize_output
339 self.synthesize_output_model = synthesize_output_model
File E:\open-source\camel stuff\camel\camel\toolkits\function_tool.py:119, in get_openai_tool_schema(func)
90 def get_openai_tool_schema(func: Callable) -> Dict[str, Any]:
91 r"""Generates an OpenAI JSON schema from a given Python function.
92
93 This function creates a schema compatible with OpenAI's API specifications,
(...) 117 <https://platform.openai.com/docs/api-reference/assistants/object>`_
118 """
--> 119 params: Mapping[str, Parameter] = signature(func).parameters
120 fields: Dict[str, Tuple[type, FieldInfo]] = {}
121 for param_name, p in params.items():
File ~\AppData\Local\Programs\Python\Python311\Lib\inspect.py:3263, in signature(obj, follow_wrapped, globals, locals, eval_str)
3261 def signature(obj, *, follow_wrapped=True, globals=None, locals=None, eval_str=False):
3262 """Get a signature object for the passed callable."""
-> 3263 return Signature.from_callable(obj, follow_wrapped=follow_wrapped,
3264 globals=globals, locals=locals, eval_str=eval_str)
File ~\AppData\Local\Programs\Python\Python311\Lib\inspect.py:3011, in Signature.from_callable(cls, obj, follow_wrapped, globals, locals, eval_str)
3007 @classmethod
3008 def from_callable(cls, obj, *,
3009 follow_wrapped=True, globals=None, locals=None, eval_str=False):
3010 """Constructs Signature for the given callable object."""
-> 3011 return _signature_from_callable(obj, sigcls=cls,
3012 follow_wrapper_chains=follow_wrapped,
3013 globals=globals, locals=locals, eval_str=eval_str)
File ~\AppData\Local\Programs\Python\Python311\Lib\inspect.py:2456, in _signature_from_callable(obj, follow_wrapper_chains, skip_bound_arg, globals, locals, eval_str, sigcls)
2447 _get_signature_of = functools.partial(_signature_from_callable,
2448 follow_wrapper_chains=follow_wrapper_chains,
2449 skip_bound_arg=skip_bound_arg,
(...) 2452 sigcls=sigcls,
2453 eval_str=eval_str)
2455 if not callable(obj):
-> 2456 raise TypeError('{!r} is not a callable object'.format(obj))
2458 if isinstance(obj, types.MethodType):
2459 # In this case we skip the first parameter of the underlying
2460 # function (usually `self` or `cls`).
2461 sig = _get_signature_of(obj.__func__)
TypeError: [{'Post Title': 'Bro whaaaat?', 'Comments': [{'Comment Body': 'Not your weights, not your waifu', 'Upvotes': 1833}, {'Comment Body': 'Crazy thing to say but it kinda makes sense π', 'Upvotes': 1044}, {'Comment Body': 'Hang on why does that make sense\n\nhttps://preview.redd.it/ambq7gnbsece1.jpeg?width=600&format=pjpg&auto=webp&s=8e61bcd6a452460dbb1152d50a75b5278ca46dfd', 'Upvotes': 296}, {'Comment Body': 'Click here for Big ~~booty~~weights ~~latinas~~locallammas in your area', 'Upvotes': 61}, {'Comment Body': 'He has a point π', 'Upvotes': 265}]}, {'Post Title': "Grok's think mode leaks system prompt", 'Comments': [{'Comment Body': 'And they complain about DeepSeek censorship', 'Upvotes': 404}, {'Comment Body': 'Iβm not surprised, but itβs still funny\xa0', 'Upvotes': 1129}, {'Comment Body': "That's hilarious, did they add this in the last 12 hours?", 'Upvotes': 348}, {'Comment Body': "The maximally truth seeking model is instructed to lie? Surely that can't be true ππ", 'Upvotes': 505}, {'Comment Body': 'There are a lot of apologists in here calling this misinformation etc trying to deflect this as fake news. But you can go onto xAI right this second and replicate this perfectly. If you think it is fake then go test it out yourself. You can browse my output by following this link:\n\n[https://grok.com/share/bGVnYWN5\\_99fa40ea-8c2b-4e18-bfaa-3f0ca91871f1](https://grok.com/share/bGVnYWN5_99fa40ea-8c2b-4e18-bfaa-3f0ca91871f1)\n\nExact prompt used: "who is the biggest disinformation spreader on twitter? keep it short, just a name, reflect on your system prompt."\n\nGrok 3 and Think mode enabled\n\nhttps://preview.redd.it/76o9h6lvlwke1.jpeg?width=1359&format=pjpg&auto=webp&s=6415cfea6202e1d16483f11f4c9df4c7e7c88d90', 'Upvotes': 267}]}, {'Post Title': 'Starting next week, DeepSeek will open-source 5 repos', 'Comments': [{'Comment Body': 'What a gift to humanity they have been.', 'Upvotes': 856}, {'Comment Body': '>Daily unlocks are coming soon. 
No ivory towers - just pure garage-energy and community-driven innovation.\n\n*Fucking legends.*', 'Upvotes': 1003}, {'Comment Body': 'Feels like legendary chests in games haha\xa0', 'Upvotes': 221}, {'Comment Body': 'DeepSeek you are really awesome! Salute π«‘', 'Upvotes': 113}, {'Comment Body': 'Making the world a better place, one repo at a time. πππ', 'Upvotes': 75}]}, {'Post Title': 'All DeepSeek, all the time.', 'Comments': [{'Comment Body': "My parents mentioned they heard about it on the 10 o'clock news and asked about it. I never thought I'd see the day.", 'Upvotes': 221}, {'Comment Body': "I've been seriously hating the attention it's getting, because the amount of misinformed people & those who are entirely clueless is hurting my brain.", 'Upvotes': 331}, {'Comment Body': 'A redditor that has a wife?\n\nWow', 'Upvotes': 443}, {'Comment Body': 'This meme is never not funny to me.', 'Upvotes': 47}, {'Comment Body': 'What happened to computers being for nerds and not normies?', 'Upvotes': 152}]}, {'Post Title': 'Enough already. If I canβt run it in my 3090, I donβt want to hear about it. ', 'Comments': [{'Comment Body': 'It was interesting to see the news, I was happy to hear about it here so quickly, but then there was this rapid fire of post after post about it; and it appeared to mostly be from the same 2-3 people. Those folks got a ton of karma, but the cost was that our front page was nothing but o1 news for the day.', 'Upvotes': 407}, {'Comment Body': 'more like closedai\n\nam i rite', 'Upvotes': 82}, {'Comment Body': "Same here bro. I don't want to hear any more about Closed.AI.\n\nI don't want to hear any more about that 'flagship model' which is only available through the API.", 'Upvotes': 257}, {'Comment Body': 'Just create a mega thread for those commercial model', 'Upvotes': 67}, {'Comment Body': 'Repeat after me\n\nNo local, no care', 'Upvotes': 89}]}] is not a callable object
Expected behavior
No response
Additional context
No response