The program stops after an agent posts
LIUQI-creat opened this issue · 2 comments
Hi,
Thanks for your excellent work!
I wanted to see what an interesting scenario looks like when the number of agents is 1000, so I simply changed agent_num to 1000 in config.yaml.
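For reference, a quick way to confirm the change took effect (assuming PyYAML is installed; the key layout below is an assumption based on the setting names mentioned in this issue):

```python
# Quick sanity check that the config change took effect (assumes PyYAML is
# installed and config.yaml sits in the repo root; key names are the ones
# mentioned in this issue, the actual layout may differ).
import yaml

with open("config.yaml", "r", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

print(cfg.get("agent_num"))  # expect 1000 after my edit
```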
The program runs normally when agents chat with each other or enter the recommender system, but when an agent posts on the social network, the terminal gets stuck, like this:
INFO:12700:Linda Davis is going to social media.
INFO:12700:Linda Davis is posting.
INFO:12700:Linda Davis posted: "Hey everyone! Looking for some great movie recommendations, haven't watched anything recently but excited to hear your suggestions!"
When I press CTRL+C, the traceback is:
Traceback (most recent call last):
File "D:\YuLan-Rec-main\YuLan-Rec-main\simulator.py", line 1220, in <module>
main()
File "D:\code\YuLan-Rec-main\YuLan-Rec-main\simulator.py", line 1211, in main
message = recagent.round()
File "D:\code\YuLan-Rec-main\YuLan-Rec-main\simulator.py", line 828, in round
msgs = self.one_step(i)
File "D:\code\YuLan-Rec-main\YuLan-Rec-main\simulator.py", line 752, in one_step
self.agents[i].memory.add_memory(
File "D:\code\YuLan-Rec-main\YuLan-Rec-main\agents\recagent_memory.py", line 867, in add_memory
self.save_context(
File "D:\code\YuLan-Rec-main\YuLan-Rec-main\agents\recagent_memory.py", line 903, in save_context
stm_memory_list = self.sensoryMemory.add_ssm(obs)
File "D:\code\YuLan-Rec-main\YuLan-Rec-main\agents\recagent_memory.py", line 177, in add_ssm
return self.dump_shortTerm_list()
File "D:\code\YuLan-Rec-main\YuLan-Rec-main\agents\recagent_memory.py", line 149, in dump_shortTerm_list
result = LLMChain(llm=self.llm, prompt=prompt).run({})
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chains\base.py", line 501, in run
return self(args[0], callbacks=callbacks, tags=tags, metadata=metadata)[
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chains\base.py", line 306, in __call__
raise e
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chains\base.py", line 300, in __call__
self._call(inputs, run_manager=run_manager)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chains\llm.py", line 93, in _call
response = self.generate([inputs], run_manager=run_manager)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chains\llm.py", line 103, in generate
return self.llm.generate_prompt(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chat_models\base.py", line 469, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chat_models\base.py", line 359, in generate
raise e
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chat_models\base.py", line 349, in generate
self._generate_with_cache(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chat_models\base.py", line 501, in _generate_with_cache
return self._generate(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chat_models\openai.py", line 360, in _generate
response = self.completion_with_retry(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chat_models\openai.py", line 299, in completion_with_retry
return _completion_with_retry(**kwargs)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\tenacity\__init__.py", line 289, in wrapped_f
return self(f, *args, **kw)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\tenacity\__init__.py", line 379, in __call__
do = self.iter(retry_state=retry_state)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\tenacity\__init__.py", line 314, in iter
return fut.result()
File "D:\applications\anaconda3\envs\recagents\lib\concurrent\futures\_base.py", line 439, in result
return self.__get_result()
File "D:\applications\anaconda3\envs\recagents\lib\concurrent\futures\_base.py", line 391, in __get_result
raise self._exception
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\tenacity\__init__.py", line 382, in __call__
result = fn(*args, **kwargs)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\langchain\chat_models\openai.py", line 297, in _completion_with_retry
return self.client.create(**kwargs)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\openai\api_resources\chat_completion.py", line 25, in create
return super().create(*args, **kwargs)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\openai\api_resources\abstract\engine_api_resource.py", line 153, in create
response, _, api_key = requestor.request(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\openai\api_requestor.py", line 288, in request
result = self.request_raw(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\openai\api_requestor.py", line 596, in request_raw
result = _thread_context.session.request(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\requests\sessions.py", line 589, in request
resp = self.send(prep, **send_kwargs)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\requests\sessions.py", line 703, in send
r = adapter.send(request, **kwargs)
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\requests\adapters.py", line 486, in send
resp = conn.urlopen(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\urllib3\connectionpool.py", line 714, in urlopen
httplib_response = self._make_request(
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\urllib3\connectionpool.py", line 466, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "D:\applications\anaconda3\envs\recagents\lib\site-packages\urllib3\connectionpool.py", line 461, in _make_request
httplib_response = conn.getresponse()
File "D:\applications\anaconda3\envs\recagents\lib\http\client.py", line 1377, in getresponse
response.begin()
File "D:\applications\anaconda3\envs\recagents\lib\http\client.py", line 320, in begin
version, status, reason = self._read_status()
File "D:\applications\anaconda3\envs\recagents\lib\http\client.py", line 281, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "D:\applications\anaconda3\envs\recagents\lib\socket.py", line 704, in readinto
return self._sock.recv_into(b)
File "D:\applications\anaconda3\envs\recagents\lib\ssl.py", line 1275, in recv_into
return self.read(nbytes, buffer)
File "D:\applications\anaconda3\envs\recagents\lib\ssl.py", line 1133, in read
return self._sslobj.read(len, buffer)
The code I'm using is up to date. I've tried setting active_agent_threshold to 1000 in config.yaml, and also rolling back to the version before change #3, but neither helps.
I also tried with 500 agents and the program runs fine, although it took seven minutes for a single post to reach 10 agents.
Is this because, once the number of agents reaches 1,000, "Linda Davis" has many friends, so all of them see the post and save it to their memories, which takes a very long time?
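If that guess is right, even a very rough estimate suggests this is a slow fan-out rather than a hang (both numbers below are made up for illustration, not measured):

```python
# Back-of-the-envelope estimate of the fan-out cost of a single post.
# Both numbers are assumptions for illustration only.
friends_per_agent = 200        # hypothetical friend count at agent_num = 1000
seconds_per_add_memory = 5.0   # hypothetical cost of one LLM-backed add_memory call

total_minutes = friends_per_agent * seconds_per_add_memory / 60
print(f"one post could take ~{total_minutes:.0f} minutes to propagate")  # ~17 minutes
```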
@LIUQI-creat Hi, I believe you are correct. From the traceback above, it appears the program stopped inside self.agents[i].memory.add_memory, which can be time-consuming: each add_memory call ends up making an LLM request, so having many friends save the post to memory adds up quickly.
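If you want to confirm where the time goes, a minimal sketch (not code from the repo; the timed helper below is just for illustration) is to log the duration of each add_memory call in one_step:

```python
# Minimal timing helper; could live in simulator.py or any importable module.
import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def timed(label: str):
    start = time.perf_counter()
    try:
        yield
    finally:
        logger.info("%s took %.1f s", label, time.perf_counter() - start)

# Inside one_step, wrap the existing call (arguments unchanged):
#     with timed(f"add_memory for agent {i}"):
#         self.agents[i].memory.add_memory(...)
```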