
Error when using mcp_server_tools, create_mcp_server_session, etc.

Open yipinterested opened this issue 7 months ago • 2 comments

What happened?

Describe the bug: When I tried to use an MCP server, errors came out. I tried running the npx command in a subprocess and in the cmd prompt, and it works just fine, but it fails when I use it in an AutoGen agent. I checked the traceback and it seems to be a timeout error, but I have no idea how to fix it. I already updated AutoGen to the latest version and it still doesn't work.

To Reproduce

import asyncio
import dotenv
import os

from pathlib import Path
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_ext.tools.mcp import StdioServerParams, mcp_server_tools, create_mcp_server_session
from autogen_agentchat.agents import AssistantAgent
from autogen_core import CancellationToken

dotenv.load_dotenv()

async def main() -> None:
    # Setup server params for local filesystem access
    params = StdioServerParams(
        command="npx", args=["-y", f"mongodb-lens {os.getenv('MONGODB_URI')}"], read_timeout_seconds=60
    )

    # Get all available tools from the server
    tools = await mcp_server_tools(params)
    print(f"Tools: {[tool.name for tool in tools]}")
    model_client = OpenAIChatCompletionClient(
        model="qwen-max",
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
        api_key=os.getenv("OPENAI_API_KEY"),
        model_info={
            "vision": False,
            "function_calling": True,
            "json_output": True,
            "family": "qwen",
            "structured_output": True,
        }
    )
    # Create an agent that can use all the tools
    agent = AssistantAgent(
        name="mongohandler",
        model_client=model_client,
        tools=tools,  # type: ignore
    )

    # The agent can now use any of the filesystem tools
    await agent.run(task="list all collections", cancellation_token=CancellationToken())


if __name__ == "__main__":
    asyncio.run(main())
  + Exception Group Traceback (most recent call last):
  |   File "d:\LLM-backend\src\mcp_test.py", line 46, in <module>
  |     asyncio.run(main())
  |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\runners.py", line 190, in run
  |     return runner.run(main)
  |            ^^^^^^^^^^^^^^^^
  |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\runners.py", line 118, in run
  |     return self._loop.run_until_complete(task)
  |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\base_events.py", line 654, in run_until_complete
  |     return future.result()
  |            ^^^^^^^^^^^^^^^
  |   File "d:\LLM-backend\src\mcp_test.py", line 20, in main
  |     tools = await mcp_server_tools(params)
  |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  |   File "D:\LLM-backend\.venv\Lib\site-packages\autogen_ext\tools\mcp\_factory.py", line 192, in mcp_server_tools
  |     async with create_mcp_server_session(server_params) as temp_session:
  |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\contextlib.py", line 231, in __aexit__
  |     await self.gen.athrow(typ, value, traceback)
  |   File "D:\LLM-backend\.venv\Lib\site-packages\autogen_ext\tools\mcp\_session.py", line 18, in create_mcp_server_session
  |     async with stdio_client(server_params) as (read, write):
  |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\contextlib.py", line 231, in __aexit__
  |     await self.gen.athrow(typ, value, traceback)
  |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\client\stdio\__init__.py", line 166, in stdio_client
  |     async with (
  |   File "D:\LLM-backend\.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 772, in __aexit__
  |     raise BaseExceptionGroup(
  | ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
  +-+---------------- 1 ----------------
    | Exception Group Traceback (most recent call last):
    |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\client\stdio\__init__.py", line 173, in stdio_client
    |     yield read_stream, write_stream
    |   File "D:\LLM-backend\.venv\Lib\site-packages\autogen_ext\tools\mcp\_session.py", line 19, in create_mcp_server_session
    |     async with ClientSession(
    |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\shared\session.py", line 210, in __aexit__
    |     return await self._task_group.__aexit__(exc_type, exc_val, exc_tb)
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "D:\LLM-backend\.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 772, in __aexit__
    |     raise BaseExceptionGroup(
    | ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
    +-+---------------- 1 ----------------
      | Traceback (most recent call last):
      |   File "D:\LLM-backend\.venv\Lib\site-packages\anyio\streams\memory.py", line 111, in receive
      |     return self.receive_nowait()
      |            ^^^^^^^^^^^^^^^^^^^^^
      |   File "D:\LLM-backend\.venv\Lib\site-packages\anyio\streams\memory.py", line 106, in receive_nowait
      |     raise WouldBlock
      | anyio.WouldBlock
      |
      | During handling of the above exception, another exception occurred:
      |
      | Traceback (most recent call last):
      |   File "D:\LLM-backend\.venv\Lib\site-packages\anyio\_core\_tasks.py", line 115, in fail_after
      |     yield cancel_scope
      |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\shared\session.py", line 252, in send_request
      |     response_or_error = await response_stream_reader.receive()
      |                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "D:\LLM-backend\.venv\Lib\site-packages\anyio\streams\memory.py", line 119, in receive
      |     await receive_event.wait()
      |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\locks.py", line 213, in wait
      |     await fut
      | asyncio.exceptions.CancelledError: Cancelled by cancel scope 2523b5d5c90
      |
      | During handling of the above exception, another exception occurred:
      |
      | Traceback (most recent call last):
      |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\shared\session.py", line 247, in send_request
      |     with anyio.fail_after(
      |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\contextlib.py", line 158, in __exit__
      |     self.gen.throw(typ, value, traceback)
      |   File "D:\LLM-backend\.venv\Lib\site-packages\anyio\_core\_tasks.py", line 118, in fail_after
      |     raise TimeoutError
      | TimeoutError
      |
      | During handling of the above exception, another exception occurred:
      |
      | Traceback (most recent call last):
      |   File "D:\LLM-backend\.venv\Lib\site-packages\autogen_ext\tools\mcp\_session.py", line 24, in create_mcp_server_session
      |     yield session
      |   File "D:\LLM-backend\.venv\Lib\site-packages\autogen_ext\tools\mcp\_factory.py", line 193, in mcp_server_tools
      |     await temp_session.initialize()
      |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\client\session.py", line 122, in initialize
      |     result = await self.send_request(
      |              ^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\shared\session.py", line 254, in send_request
      |     raise McpError(
      | mcp.shared.exceptions.McpError: Timed out while waiting for response to ClientRequest. Waited 0:01:00 seconds.
      +------------------------------------
    |
    | During handling of the above exception, another exception occurred:
    |
    | Traceback (most recent call last):
    |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\client\stdio\__init__.py", line 177, in stdio_client
    |     await terminate_windows_process(process)
    |   File "D:\LLM-backend\.venv\Lib\site-packages\mcp\client\stdio\win32.py", line 104, in terminate_windows_process
    |     process.terminate()
    |   File "D:\LLM-backend\.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 1085, in terminate
    |     self._process.terminate()
    |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\subprocess.py", line 143, in terminate
    |     self._transport.terminate()
    |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\base_subprocess.py", line 149, in terminate
    |     self._check_proc()
    |   File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\base_subprocess.py", line 142, in _check_proc
    |     raise ProcessLookupError()
    | ProcessLookupError
    +------------------------------------

Expected behavior: There should be an answer from the agent.

Which packages was the bug in?

Python Extensions (autogen-ext)

AutoGen library version.

Python 0.5.5

Other library version.

No response

Model used

qwen-max

Model provider

None

Other model provider

No response

Python version

3.11

.NET version

None

Operating system

Windows

yipinterested commented Apr 29 '25 09:04

How about changing it like this? We now have Workbench instead of tools.

import asyncio
import dotenv
import os

from pathlib import Path
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_ext.tools.mcp import StdioServerParams, mcp_server_tools, create_mcp_server_session, McpWorkbench
from autogen_agentchat.agents import AssistantAgent
from autogen_core import CancellationToken

dotenv.load_dotenv()

async def main() -> None:
    # Setup server params for local filesystem access
    params = StdioServerParams(
        command="npx", args=["-y", f"mongodb-lens {os.getenv('MONGODB_URI')}"], read_timeout_seconds=60
    )

    # Get all available tools from the server
    # tools = await mcp_server_tools(params)
    workbench = McpWorkbench(params)

    print(f"Tools: {[tool.name for tool in tools]}")
    model_client = OpenAIChatCompletionClient(
        model="qwen-max",
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
        api_key=os.getenv("OPENAI_API_KEY"),
        model_info={
            "vision": False,
            "function_calling": True,
            "json_output": True,
            "family": "qwen",
            "structured_output": True,
        }
    )
    # Create an agent that can use all the tools
    agent = AssistantAgent(
        name="mongohandler",
        model_client=model_client,
        # tools=tools,  # type: ignore
        workbench=workbench,
    )

    # The agent can now use any of the filesystem tools
    await agent.run(task="list all collections", cancellation_token=CancellationToken())


if __name__ == "__main__":
    asyncio.run(main())

I want to test this for you; however, I do not have MongoDB to test with... haha.

SongChiYoung commented Apr 29 '25 13:04

Thanks for the quick response!! I tried other MCP tools like playwright and mcp-server-fetch, and they worked, but the MongoDB MCP still doesn't work. I tried changing the args to ["-y", "mongodb-lens", os.getenv('MONGODB_URI')] and it still doesn't work, and I have no idea why. I used the same MCP server with the same command in other MCP clients like Cherry Studio and n8n, and they work just fine lol.
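
For reference, the full params for that attempt look roughly like this (same imports as the script above; the 120-second timeout is only a guess to give npx time to fetch the package on a cold start). As far as I can tell, StdioServerParams passes args straight to the subprocess without shell word-splitting, so the URI has to be its own list entry rather than part of one quoted string:

params = StdioServerParams(
    command="npx",
    args=["-y", "mongodb-lens", os.getenv("MONGODB_URI", "")],
    read_timeout_seconds=120,  # guess: allow for npx's first-run package download
)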

But this time the code doesn't raise errors and just gets stuck. I manually interrupted it, and here is the traceback:

Traceback (most recent call last):
  File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\runners.py", line 118, in run
    return self._loop.run_until_complete(task)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\base_events.py", line 654, in run_until_complete
    return future.result()
           ^^^^^^^^^^^^^^^
  File "d:\LLM-backend\src\mcp_test.py", line 44, in main
    result = await agent.run(task="Summarize the content of https://en.wikipedia.org/wiki/Seattle")
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\LLM-backend\.venv\Lib\site-packages\autogen_agentchat\agents\_base_chat_agent.py", line 136, in run
    response = await self.on_messages(input_messages, cancellation_token)
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\LLM-backend\.venv\Lib\site-packages\autogen_agentchat\agents\_assistant_agent.py", line 782, in on_messages
    async for message in self.on_messages_stream(messages, cancellation_token):
  File "D:\LLM-backend\.venv\Lib\site-packages\autogen_agentchat\agents\_assistant_agent.py", line 827, in on_messages_stream
    async for inference_output in self._call_llm(
  File "D:\LLM-backend\.venv\Lib\site-packages\autogen_agentchat\agents\_assistant_agent.py", line 935, in _call_llm
    tools = (await workbench.list_tools()) + handoff_tools
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\LLM-backend\.venv\Lib\site-packages\autogen_ext\tools\mcp\_workbench.py", line 88, in list_tools
    result_future = await self._actor.call("list_tools", None)
                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\LLM-backend\.venv\Lib\site-packages\autogen_ext\tools\mcp\_actor.py", line 58, in call
    res = await fut
          ^^^^^^^^^
asyncio.exceptions.CancelledError

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "d:\LLM-backend\src\mcp_test.py", line 49, in <module>
    asyncio.run(main())
  File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\runners.py", line 190, in run
    return runner.run(main)
           ^^^^^^^^^^^^^^^^
  File "C:\Users\Administrator\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\asyncio\runners.py", line 123, in run
    raise KeyboardInterrupt()
KeyboardInterrupt
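
A minimal check that bypasses the agent and the workbench entirely can narrow this down (a sketch only; if initialize() hangs here too, the problem is the MCP handshake with mongodb-lens itself rather than anything in autogen-agentchat):

import asyncio
import os

from autogen_ext.tools.mcp import StdioServerParams, create_mcp_server_session


async def probe() -> None:
    params = StdioServerParams(
        command="npx",
        args=["-y", "mongodb-lens", os.getenv("MONGODB_URI", "")],
        read_timeout_seconds=60,
    )
    # Open a raw MCP session over stdio, without any agent or workbench on top.
    async with create_mcp_server_session(params) as session:
        await session.initialize()           # the MCP handshake that times out above
        result = await session.list_tools()  # raw tools/list request
        print([tool.name for tool in result.tools])


if __name__ == "__main__":
    asyncio.run(probe())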

yipinterested commented Apr 30 '25 03:04

Okay, I found the errors.

  1. Our McpWorkbench requires the properties field in a tool's input schema, but some of mongodb-lens's tools do not have it. I will fix it so that when properties is None it is treated as {} (sketched below).
  2. Our McpWorkbench currently has no stop routine when it is not used via async with McpWorkbench(params) as workbench: and it relies on lazy init. So I will add a __del__ method; just inserting that should make the error visible.
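
Rough sketches of what I have in mind (illustrative only, not the actual patch; params and model_client are the ones from the earlier snippet):

from autogen_agentchat.agents import AssistantAgent
from autogen_core import CancellationToken
from autogen_ext.tools.mcp import McpWorkbench


# 1. Treat a missing/None "properties" in an MCP tool's input schema as an
#    empty object so the schema conversion does not fail.
def normalized_input_schema(input_schema: dict) -> dict:
    schema = dict(input_schema)
    if schema.get("properties") is None:
        schema["properties"] = {}
    return schema


# 2. Until an explicit stop routine / __del__ lands, wrapping the workbench in
#    "async with" starts and stops the MCP server process deterministically
#    instead of relying on lazy init and implicit cleanup.
async def run_with_workbench() -> None:
    async with McpWorkbench(params) as workbench:
        agent = AssistantAgent(
            name="mongohandler",
            model_client=model_client,
            workbench=workbench,
        )
        await agent.run(task="list all collections", cancellation_token=CancellationToken())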

SongChiYoung commented May 01 '25 04:05