# Source code for langchain.agents.openai_functions_agent.agent_token_buffer_memory
"""Memory used to save agent output AND intermediate steps."""fromtypingimportAny,Dict,Listfromlangchain_core.language_modelsimportBaseLanguageModelfromlangchain_core.messagesimportBaseMessage,get_buffer_stringfromlangchain.agents.format_scratchpadimport(format_to_openai_function_messages,format_to_tool_messages,)fromlangchain.memory.chat_memoryimportBaseChatMemory
class AgentTokenBufferMemory(BaseChatMemory):  # type: ignore[override]
    """Memory used to save agent output AND intermediate steps.

    Parameters:
        human_prefix: Prefix for human messages. Default is "Human".
        ai_prefix: Prefix for AI messages. Default is "AI".
        llm: Language model.
        memory_key: Key to save memory under. Default is "history".
        max_token_limit: Maximum number of tokens to keep in the buffer.
            Once the buffer exceeds this many tokens, the oldest messages
            will be pruned. Default is 12000.
        return_messages: Whether to return messages. Default is True.
        output_key: Key to save output under. Default is "output".
        intermediate_steps_key: Key to save intermediate steps under.
            Default is "intermediate_steps".
        format_as_tools: Whether to format as tools. Default is False.
    """

    human_prefix: str = "Human"
    ai_prefix: str = "AI"
    llm: BaseLanguageModel
    memory_key: str = "history"
    max_token_limit: int = 12000
    """The max number of tokens to keep in the buffer.

    Once the buffer exceeds this many tokens, the oldest messages will be
    pruned.
    """
    return_messages: bool = True
    output_key: str = "output"
    intermediate_steps_key: str = "intermediate_steps"
    format_as_tools: bool = False

    @property
    def buffer(self) -> List[BaseMessage]:
        """String buffer of memory."""
        return self.chat_memory.messages

    @property
    def memory_variables(self) -> List[str]:
        """Always return list of memory variables.

        :meta private:
        """
        return [self.memory_key]

    def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Return history buffer.

        Args:
            inputs: Inputs to the agent.

        Returns:
            A dictionary with the history buffer under ``self.memory_key``,
            either as a list of messages (``return_messages=True``) or as a
            single prefixed string.
        """
        if self.return_messages:
            final_buffer: Any = self.buffer
        else:
            final_buffer = get_buffer_string(
                self.buffer,
                human_prefix=self.human_prefix,
                ai_prefix=self.ai_prefix,
            )
        return {self.memory_key: final_buffer}

    def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, Any]) -> None:
        """Save context from this conversation to buffer. Pruned.

        Args:
            inputs: Inputs to the agent.
            outputs: Outputs from the agent. Must contain
                ``self.intermediate_steps_key``.
        """
        input_str, output_str = self._get_input_output(inputs, outputs)
        self.chat_memory.add_user_message(input_str)
        # Intermediate (agent action, observation) steps are serialized as
        # messages so the agent can see its own tool usage on the next turn.
        format_to_messages = (
            format_to_tool_messages
            if self.format_as_tools
            else format_to_openai_function_messages
        )
        steps = format_to_messages(outputs[self.intermediate_steps_key])
        for msg in steps:
            self.chat_memory.add_message(msg)
        self.chat_memory.add_ai_message(output_str)
        # Prune oldest messages until the buffer fits within max_token_limit.
        # (The loop condition alone suffices; no outer `if` guard is needed.)
        buffer = self.chat_memory.messages
        curr_buffer_length = self.llm.get_num_tokens_from_messages(buffer)
        while curr_buffer_length > self.max_token_limit:
            buffer.pop(0)
            curr_buffer_length = self.llm.get_num_tokens_from_messages(buffer)