@deprecated(
    since="0.2.12",
    removal="1.0",
    message=(
        "Refer here for how to incorporate summaries of conversation history: "
        "https://langchain-ai.github.io/langgraph/how-tos/memory/add-summary-conversation-history/"
    ),
)
class SummarizerMixin(BaseModel):
    """Mixin providing progressive-summarization of chat messages via an LLM."""

    # Labels used when rendering messages into a flat transcript string.
    human_prefix: str = "Human"
    ai_prefix: str = "AI"
    # Model and prompt driving the summarization chain.
    llm: BaseLanguageModel
    prompt: BasePromptTemplate = SUMMARY_PROMPT
    # Message class used to wrap the summary when returning message objects.
    summary_message_cls: type[BaseMessage] = SystemMessage

    def predict_new_summary(
        self,
        messages: list[BaseMessage],
        existing_summary: str,
    ) -> str:
        """Predict a new summary based on the messages and existing summary.

        Args:
            messages: List of messages to summarize.
            existing_summary: Existing summary to build upon.

        Returns:
            A new summary string.
        """
        # Flatten the messages into a prefixed transcript, then fold it into
        # the running summary with a single LLM call.
        transcript = get_buffer_string(
            messages,
            human_prefix=self.human_prefix,
            ai_prefix=self.ai_prefix,
        )
        summarizer = LLMChain(llm=self.llm, prompt=self.prompt)
        return summarizer.predict(summary=existing_summary, new_lines=transcript)

    async def apredict_new_summary(
        self,
        messages: list[BaseMessage],
        existing_summary: str,
    ) -> str:
        """Predict a new summary based on the messages and existing summary.

        Args:
            messages: List of messages to summarize.
            existing_summary: Existing summary to build upon.

        Returns:
            A new summary string.
        """
        # Async variant of predict_new_summary; identical flow via apredict.
        transcript = get_buffer_string(
            messages,
            human_prefix=self.human_prefix,
            ai_prefix=self.ai_prefix,
        )
        summarizer = LLMChain(llm=self.llm, prompt=self.prompt)
        return await summarizer.apredict(
            summary=existing_summary, new_lines=transcript
        )
@deprecated(
    since="0.3.1",
    removal="1.0.0",
    message=(
        "Please see the migration guide at: "
        "https://python.langchain.com/docs/versions/migrating_memory/"
    ),
)
class ConversationSummaryMemory(BaseChatMemory, SummarizerMixin):
    """Continually summarizes the conversation history.

    The summary is updated after each conversation turn.
    The implementations returns a summary of the conversation history which
    can be used to provide context to the model.
    """

    # Running summary of everything saved so far.
    buffer: str = ""
    memory_key: str = "history"  #: :meta private:

    @classmethod
    def from_messages(
        cls,
        llm: BaseLanguageModel,
        chat_memory: BaseChatMessageHistory,
        *,
        summarize_step: int = 2,
        **kwargs: Any,
    ) -> ConversationSummaryMemory:
        """Create a ConversationSummaryMemory from a list of messages.

        Args:
            llm: The language model to use for summarization.
            chat_memory: The chat history to summarize.
            summarize_step: Number of messages to summarize at a time.
            **kwargs: Additional keyword arguments to pass to the class.

        Returns:
            An instance of ConversationSummaryMemory with the summarized
            history.
        """
        memory = cls(llm=llm, chat_memory=chat_memory, **kwargs)
        history = memory.chat_memory.messages
        # Fold the existing history into the buffer in fixed-size chunks.
        for start in range(0, len(history), summarize_step):
            chunk = history[start : start + summarize_step]
            memory.buffer = memory.predict_new_summary(chunk, memory.buffer)
        return memory

    @property
    def memory_variables(self) -> list[str]:
        """Will always return list of memory variables.

        :meta private:
        """
        return [self.memory_key]

    @override
    def load_memory_variables(self, inputs: dict[str, Any]) -> dict[str, Any]:
        """Return history buffer."""
        # Either wrap the summary in a message object or hand back raw text,
        # depending on how the caller configured return_messages.
        content: Any = (
            [self.summary_message_cls(content=self.buffer)]
            if self.return_messages
            else self.buffer
        )
        return {self.memory_key: content}

    @pre_init
    def validate_prompt_input_variables(cls, values: dict) -> dict:
        """Validate that prompt input variables are consistent."""
        prompt_variables = values["prompt"].input_variables
        expected_keys = {"summary", "new_lines"}
        if set(prompt_variables) != expected_keys:
            msg = (
                "Got unexpected prompt input variables. The prompt expects "
                f"{prompt_variables}, but it should have {expected_keys}."
            )
            raise ValueError(msg)
        return values

    def save_context(self, inputs: dict[str, Any], outputs: dict[str, str]) -> None:
        """Save context from this conversation to buffer."""
        super().save_context(inputs, outputs)
        # Only the two most recent messages (the turn just saved) need to be
        # folded into the running summary.
        latest_turn = self.chat_memory.messages[-2:]
        self.buffer = self.predict_new_summary(latest_turn, self.buffer)