
Error when running the demo (installed via Docker)

Open · delinecnlin opened this issue 2 years ago · 2 comments

Unclosed connection
client_connection: Connection<ConnectionKey(host='xxx.openai.azure.com', port=443, is_ssl=True, ssl=None, proxy=None, proxy_auth=None, proxy_headers_hash=None)>
an error occurred during closing of asynchronous generator <async_generator object aiohttp_session at 0x7f521433bee0>
asyncgen: <async_generator object aiohttp_session at 0x7f521433bee0>
Traceback (most recent call last):
  File "/usr/local/lib/python3.9/site-packages/tenacity/_asyncio.py", line 50, in __call__
    result = await fn(*args, **kwargs)
  File "/app/metagpt/metagpt/actions/action.py", line 57, in _aask_v1
    content = await self.llm.aask(prompt, system_msgs)
  File "/app/metagpt/metagpt/provider/base_gpt_api.py", line 44, in aask
    rsp = await self.acompletion_text(message, stream=True)
  File "/app/metagpt/metagpt/provider/openai_api.py", line 27, in wrapper
    return await f(*args, **kwargs)
  File "/app/metagpt/metagpt/provider/openai_api.py", line 213, in acompletion_text
    return await self._achat_completion_stream(messages)
  File "/app/metagpt/metagpt/provider/openai_api.py", line 157, in _achat_completion_stream
    chunk_message = chunk['choices'][0]['delta']  # extract the message
IndexError: list index out of range

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/usr/local/lib/python3.9/asyncio/runners.py", line 44, in run
    return loop.run_until_complete(main)
  File "/usr/local/lib/python3.9/asyncio/base_events.py", line 647, in run_until_complete
    return future.result()
  File "/app/metagpt/startup.py", line 15, in startup
    await company.run(n_round=n_round)
  File "/app/metagpt/metagpt/software_company.py", line 60, in run
    await self.environment.run()
  File "/app/metagpt/metagpt/environment.py", line 64, in run
    await asyncio.gather(*futures)
  File "/app/metagpt/metagpt/roles/role.py", line 229, in run
    rsp = await self._react()
  File "/app/metagpt/metagpt/roles/role.py", line 200, in _react
    return await self._act()
  File "/app/metagpt/metagpt/roles/role.py", line 156, in _act
    response = await self._rc.todo.run(self._rc.important_memory)
  File "/app/metagpt/metagpt/actions/write_prd.py", line 139, in run
    prd = await self._aask_v1(prompt, "prd", OUTPUT_MAPPING)
  File "/usr/local/lib/python3.9/site-packages/tenacity/_asyncio.py", line 88, in async_wrapped
    return await fn(*args, **kwargs)
  File "/usr/local/lib/python3.9/site-packages/tenacity/_asyncio.py", line 47, in __call__
    do = self.iter(retry_state=retry_state)
  File "/usr/local/lib/python3.9/site-packages/tenacity/__init__.py", line 326, in iter
    raise retry_exc from fut.exception()
tenacity.RetryError: RetryError[<Future at 0x7f521439b400 state=finished raised IndexError>]

During handling of the above exception, another exception occurred:

RuntimeError: aclose(): asynchronous generator is already running

Traceback (most recent call last):
  File "/usr/local/lib/python3.9/site-packages/tenacity/_asyncio.py", line 50, in __call__
    result = await fn(*args, **kwargs)
  File "/app/metagpt/metagpt/actions/action.py", line 57, in _aask_v1
    content = await self.llm.aask(prompt, system_msgs)
  File "/app/metagpt/metagpt/provider/base_gpt_api.py", line 44, in aask
    rsp = await self.acompletion_text(message, stream=True)
  File "/app/metagpt/metagpt/provider/openai_api.py", line 27, in wrapper
    return await f(*args, **kwargs)
  File "/app/metagpt/metagpt/provider/openai_api.py", line 213, in acompletion_text
    return await self._achat_completion_stream(messages)
  File "/app/metagpt/metagpt/provider/openai_api.py", line 157, in _achat_completion_stream
    chunk_message = chunk['choices'][0]['delta']  # extract the message
IndexError: list index out of range

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/app/metagpt/startup.py", line 29, in <module>
    fire.Fire(main)
  File "/usr/local/lib/python3.9/site-packages/fire/core.py", line 141, in Fire
    component_trace = _Fire(component, args, parsed_flag_args, context, name)
  File "/usr/local/lib/python3.9/site-packages/fire/core.py", line 466, in _Fire
    component, remaining_args = _CallAndUpdateTrace(
  File "/usr/local/lib/python3.9/site-packages/fire/core.py", line 681, in _CallAndUpdateTrace
    component = fn(*varargs, **kwargs)
  File "/app/metagpt/startup.py", line 25, in main
    asyncio.run(startup(idea, investment, n_round))
  File "/usr/local/lib/python3.9/asyncio/runners.py", line 44, in run
    return loop.run_until_complete(main)
  File "/usr/local/lib/python3.9/asyncio/base_events.py", line 647, in run_until_complete
    return future.result()
  File "/app/metagpt/startup.py", line 15, in startup
    await company.run(n_round=n_round)
  File "/app/metagpt/metagpt/software_company.py", line 60, in run
    await self.environment.run()
  File "/app/metagpt/metagpt/environment.py", line 64, in run
    await asyncio.gather(*futures)
  File "/app/metagpt/metagpt/roles/role.py", line 229, in run
    rsp = await self._react()
  File "/app/metagpt/metagpt/roles/role.py", line 200, in _react
    return await self._act()
  File "/app/metagpt/metagpt/roles/role.py", line 156, in _act
    response = await self._rc.todo.run(self._rc.important_memory)
  File "/app/metagpt/metagpt/actions/write_prd.py", line 139, in run
    prd = await self._aask_v1(prompt, "prd", OUTPUT_MAPPING)
  File "/usr/local/lib/python3.9/site-packages/tenacity/_asyncio.py", line 88, in async_wrapped
    return await fn(*args, **kwargs)
  File "/usr/local/lib/python3.9/site-packages/tenacity/_asyncio.py", line 47, in __call__
    do = self.iter(retry_state=retry_state)
  File "/usr/local/lib/python3.9/site-packages/tenacity/__init__.py", line 326, in iter
    raise retry_exc from fut.exception()
tenacity.RetryError: RetryError[<Future at 0x7f521439b400 state=finished raised IndexError>]

delinecnlin · Sep 03 '23 01:09
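The failing line in every traceback above is chunk['choices'][0]['delta'] in metagpt/provider/openai_api.py (_achat_completion_stream). Azure OpenAI streaming responses can include chunks whose choices list is empty (a leading content-filter chunk, for instance), and indexing choices[0] on such a chunk raises exactly this IndexError, which tenacity then wraps in a RetryError. A minimal, self-contained sketch of the kind of guard that avoids it is below; the function names and the fake stream are illustrative, not MetaGPT code.

```python
import asyncio
from typing import AsyncIterator

async def collect_stream(chunks: AsyncIterator[dict]) -> str:
    """Concatenate streamed delta content, skipping chunks with no choices."""
    pieces = []
    async for chunk in chunks:
        if not chunk.get("choices"):  # guard: Azure may send an empty choices list
            continue
        delta = chunk["choices"][0].get("delta", {})
        pieces.append(delta.get("content", ""))
    return "".join(pieces)

async def _fake_azure_stream() -> AsyncIterator[dict]:
    # Simulates an Azure stream: an empty-choices chunk followed by content chunks.
    yield {"choices": []}
    yield {"choices": [{"delta": {"role": "assistant"}}]}
    yield {"choices": [{"delta": {"content": "Hello"}}]}
    yield {"choices": [{"delta": {"content": " world"}}]}

print(asyncio.run(collect_stream(_fake_azure_stream())))  # prints "Hello world"
```

The same guard, placed just before the chunk_message = chunk['choices'][0]['delta'] line that the traceback points at, should let the streamed reply be collected instead of exhausting tenacity's retries.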

Streaming not supported?

geekan · Sep 03 '23 13:09

You mean my GPT model? It's gpt-4-32k, so it should support streaming.

delinecnlin · Sep 03 '23 23:09
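If the open question is whether the Azure deployment streams at all, it can be checked outside MetaGPT with the legacy openai 0.x SDK that MetaGPT used at the time. This is a sketch with placeholders: the endpoint is taken from the log above, while the API version and the deployment name gpt-4-32k are assumptions to adapt to your resource.

```python
import asyncio
import openai

# Placeholders / assumptions: adjust to your Azure resource and deployment.
openai.api_type = "azure"
openai.api_base = "https://xxx.openai.azure.com"  # endpoint from the log above
openai.api_version = "2023-05-15"                 # assumed API version
openai.api_key = "<your-azure-openai-key>"

async def main() -> None:
    response = await openai.ChatCompletion.acreate(
        engine="gpt-4-32k",                       # Azure *deployment* name (assumption)
        messages=[{"role": "user", "content": "Say hi"}],
        stream=True,
    )
    async for chunk in response:
        # Skip chunks with an empty choices list (the case the traceback trips over).
        if not chunk["choices"]:
            continue
        delta = chunk["choices"][0].get("delta", {})
        print(delta.get("content", ""), end="", flush=True)
    print()

asyncio.run(main())
```

If this script streams tokens but MetaGPT still fails, the cause is most likely the unguarded indexing in _achat_completion_stream rather than missing streaming support in the model.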