@deprecated(
    since="0.3.1",
    removal="1.0.0",
    message=(
        "Please see the migration guide at: "
        "https://python.langchain.com/docs/versions/migrating_memory/"
    ),
)
class ConversationBufferMemory(BaseChatMemory):
    """A basic memory implementation that simply stores the conversation history.

    This stores the entire conversation history in memory without any
    additional processing.

    Note that additional processing may be required in some situations when the
    conversation history is too large to fit in the context window of the model.
    """

    human_prefix: str = "Human"
    ai_prefix: str = "AI"
    memory_key: str = "history"  #: :meta private:

    @property
    def buffer(self) -> Any:
        """String buffer of memory."""
        # Shape of the returned buffer depends on the return_messages flag.
        if self.return_messages:
            return self.buffer_as_messages
        return self.buffer_as_str
[docs]asyncdefabuffer(self)->Any:"""String buffer of memory."""return(awaitself.abuffer_as_messages()ifself.return_messageselseawaitself.abuffer_as_str())
def_buffer_as_str(self,messages:List[BaseMessage])->str:returnget_buffer_string(messages,human_prefix=self.human_prefix,ai_prefix=self.ai_prefix,)@propertydefbuffer_as_str(self)->str:"""Exposes the buffer as a string in case return_messages is True."""returnself._buffer_as_str(self.chat_memory.messages)
[docs]asyncdefabuffer_as_str(self)->str:"""Exposes the buffer as a string in case return_messages is True."""messages=awaitself.chat_memory.aget_messages()returnself._buffer_as_str(messages)
@propertydefbuffer_as_messages(self)->List[BaseMessage]:"""Exposes the buffer as a list of messages in case return_messages is False."""returnself.chat_memory.messages
[docs]asyncdefabuffer_as_messages(self)->List[BaseMessage]:"""Exposes the buffer as a list of messages in case return_messages is False."""returnawaitself.chat_memory.aget_messages()
@propertydefmemory_variables(self)->List[str]:"""Will always return list of memory variables. :meta private: """return[self.memory_key]
[docs]defload_memory_variables(self,inputs:Dict[str,Any])->Dict[str,Any]:"""Return history buffer."""return{self.memory_key:self.buffer}
[docs]asyncdefaload_memory_variables(self,inputs:Dict[str,Any])->Dict[str,Any]:"""Return key-value pairs given the text input to the chain."""buffer=awaitself.abuffer()return{self.memory_key:buffer}
@deprecated(
    since="0.3.1",
    removal="1.0.0",
    message=(
        "Please see the migration guide at: "
        "https://python.langchain.com/docs/versions/migrating_memory/"
    ),
)
class ConversationStringBufferMemory(BaseMemory):
    """A basic memory implementation that simply stores the conversation history.

    This stores the entire conversation history in memory without any
    additional processing.

    Equivalent to ConversationBufferMemory but tailored more specifically
    for string-based conversations rather than chat models.

    Note that additional processing may be required in some situations when the
    conversation history is too large to fit in the context window of the model.
    """

    human_prefix: str = "Human"
    ai_prefix: str = "AI"
    """Prefix to use for AI generated responses."""
    buffer: str = ""
    output_key: Optional[str] = None
    input_key: Optional[str] = None
    memory_key: str = "history"  #: :meta private:
[docs]@pre_initdefvalidate_chains(cls,values:Dict)->Dict:"""Validate that return messages is not True."""ifvalues.get("return_messages",False):raiseValueError("return_messages must be False for ConversationStringBufferMemory")returnvalues
@propertydefmemory_variables(self)->List[str]:"""Will always return list of memory variables. :meta private: """return[self.memory_key]
[docs]defload_memory_variables(self,inputs:Dict[str,Any])->Dict[str,str]:"""Return history buffer."""return{self.memory_key:self.buffer}
[docs]asyncdefaload_memory_variables(self,inputs:Dict[str,Any])->Dict[str,str]:"""Return history buffer."""returnself.load_memory_variables(inputs)
[docs]defsave_context(self,inputs:Dict[str,Any],outputs:Dict[str,str])->None:"""Save context from this conversation to buffer."""ifself.input_keyisNone:prompt_input_key=get_prompt_input_key(inputs,self.memory_variables)else:prompt_input_key=self.input_keyifself.output_keyisNone:iflen(outputs)!=1:raiseValueError(f"One output key expected, got {outputs.keys()}")output_key=list(outputs.keys())[0]else:output_key=self.output_keyhuman=f"{self.human_prefix}: "+inputs[prompt_input_key]ai=f"{self.ai_prefix}: "+outputs[output_key]self.buffer+="\n"+"\n".join([human,ai])
[docs]asyncdefasave_context(self,inputs:Dict[str,Any],outputs:Dict[str,str])->None:"""Save context from this conversation to buffer."""returnself.save_context(inputs,outputs)