openai-python

Error code: 400 - {'error': {'message': "Invalid parameter: 'tool_calls' cannot be used when 'functions' are present

Birdy647JH opened this issue on Oct 17, 2024 · 0 comments

Confirm this is an issue with the Python library and not an underlying OpenAI API

  • [X] This is an issue with the Python library

Describe the bug

Error code: 400 - {'error': {'message': "Invalid parameter: 'tool_calls' cannot be used when 'functions' are present. Please use 'tools' instead of 'functions'.", 'type': 'invalid_request_error', 'param': 'messages.[2].tool_calls', 'code': None}}
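
As the error message states, this 400 comes from mixing the two tool-calling APIs in one request: the message history already contains an assistant message carrying `tool_calls` (produced via the tools API), while the new request is sent with the legacy `functions` parameter. A minimal sketch of that conflicting request shape (the history, ids, and model name here are made up for illustration):

from openai import OpenAI

client = OpenAI()

# An assistant turn that was produced earlier through the *tools* API,
# so it carries a `tool_calls` entry (ids and arguments are made up).
messages = [
    {"role": "user", "content": "Look up claim 123"},
    {
        "role": "assistant",
        "content": None,
        "tool_calls": [
            {
                "id": "call_abc",
                "type": "function",
                "function": {"name": "lookup_claim", "arguments": '{"claim": "123"}'},
            }
        ],
    },
    {"role": "tool", "tool_call_id": "call_abc", "content": "claim found"},
]

# Re-sending that history together with the legacy `functions` parameter
# (instead of `tools`) triggers the same invalid_request_error.
client.chat.completions.create(
    model="gpt-4o",
    messages=messages,
    functions=[{"name": "route", "parameters": {"type": "object", "properties": {}}}],
)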

To Reproduce

The graph structure is in the code snippet below. After MAssassor answers its question (it calls a tool and then control returns to MAssassor), the flow should route back to the Supervisor, but instead the error above is raised. (Screenshot attached: Screenshot 2024-10-17 at 10 17 20 AM)

Code snippets

## Flow: first read the guideline, then go to the different sources to get the data.

# Imports inferred from the snippet (langchain_core / langgraph):
import operator
from typing import Annotated, List, TypedDict

from langchain_core.messages import BaseMessage
from langchain_core.output_parsers.openai_functions import JsonOutputFunctionsParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langgraph.graph import END, START, StateGraph

def create_team_supervisor(llm, system_prompt, members):
    """An LLM-based router."""
    options = ["FINISH"] + members
    function_def = {
        "name": "route",
        "description": "Select the next role.",
        "parameters": {
            "title": "routeSchema",
            "type": "object",
            "properties": {
                "next": {
                    "title": "Next",
                    "anyOf": [
                        {"enum": options},
                    ],
                },
            },
            "required": ["next"], 
        },
    }
    
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            MessagesPlaceholder(variable_name="messages"),
            (
                "system",
                "Given the conversation above, who should act next?"
                " Or should we FINISH? Select one of: {options}",
            ),
        ]
    ).partial(options=str(options), team_members=", ".join(members))
    return (
        prompt
        | llm.bind_functions(functions=[function_def], function_call="route")  # sends the legacy 'functions' parameter
        | JsonOutputFunctionsParser()
    )


class TeamState(TypedDict):
    # A message is added after each team member finishes
    messages: Annotated[List[BaseMessage], operator.add]

    # The team members are tracked so they are aware of
    # the others' skill-sets
    team_members: List[str]

    # Used to route work. The supervisor calls a function
    # that will update this every time it makes a decision
    next: str

    # The agent that produced the last message; used to route
    # back to it after a tool call
    sender: str

    # Claim-specific context shared across the team
    POLICY_NUMBER: str
    CLAIM_NUMBER: str
    FILE_PATH: str

def router(state):
    # This is the router
    messages = state["messages"]
    last_message = messages[-1]
    if not last_message.content:
        # The previous agent is invoking a tool
        # print(state['sender'], 'call_tool')
        return "call_tool"
    # print(state['sender'], "CogSupervisor")
    return 'continue'

graph = StateGraph(TeamState)

## Add nodes to the graph (med_node, beh_node, cog_supervisor, and tool_node are the agent/tool runnables defined elsewhere)
graph.add_node("MAssassor", med_node)
graph.add_node("BAssassor", beh_node)
graph.add_node("Supervisor", cog_supervisor)
graph.add_node("call_tool", tool_node)

graph.add_edge(START, "Supervisor")

## Add conditional edges to the graph
graph.add_conditional_edges(
    "MAssassor",
    router,
    {"call_tool": "call_tool", "continue": "Supervisor"},
)
graph.add_conditional_edges(
    "BAssassor",
    router,
    {"call_tool": "call_tool", "continue": "Supervisor"},
)
graph.add_conditional_edges(
    "Supervisor",
    lambda x: x["next"],
    {"MAssassor": "MAssassor",
     "BAssassor": "BAssassor", 
     "FINISH": END},
)

graph.add_conditional_edges(
    "call_tool",
    lambda x: x["sender"],
    {"MAssassor": "MAssassor",
     "BAssassor": "BAssassor",
    },
)

chain = graph.compile()
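
In this graph the agent nodes (med_node / beh_node) presumably call tools, so the messages they add to the shared state carry tool_calls; create_team_supervisor then re-sends that history through llm.bind_functions, i.e. with the legacy functions parameter, which is exactly the combination the API rejects. A possible workaround, sketched here only under the assumption that llm is a langchain_openai ChatOpenAI instance and that langchain_core's JsonOutputKeyToolsParser is available, is to bind the supervisor through the tools API instead:

from langchain_core.output_parsers.openai_tools import JsonOutputKeyToolsParser


def create_team_supervisor_tools(llm, system_prompt, members):
    """Same router, but bound via `tools` so the request no longer
    mixes `functions` with a history that contains `tool_calls`."""
    options = ["FINISH"] + members
    route_tool = {
        "name": "route",
        "description": "Select the next role.",
        "parameters": {
            "type": "object",
            "properties": {"next": {"enum": options}},
            "required": ["next"],
        },
    }
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            MessagesPlaceholder(variable_name="messages"),
            (
                "system",
                "Given the conversation above, who should act next?"
                " Or should we FINISH? Select one of: {options}",
            ),
        ]
    ).partial(options=str(options), team_members=", ".join(members))
    return (
        prompt
        # bind_tools sends the definition via `tools` / `tool_choice`
        | llm.bind_tools([route_tool], tool_choice="route")
        # returns the arguments of the forced "route" call, e.g. {"next": "MAssassor"}
        | JsonOutputKeyToolsParser(key_name="route", first_tool_only=True)
    )

The parser output keeps the {"next": ...} shape, so the existing lambda x: x["next"] routing on the Supervisor node would stay unchanged; whether this fully resolves the error also depends on how the agent nodes bind their own tools.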

OS

macOS

Python version

Python 3.9.6

Library version

openai 1.51.2

Birdy647JH · Oct 17 '24 14:10