[docs]@deprecated(since="0.2.12",removal="1.0",message=("Refer here for how to incorporate summaries of conversation history: ""https://langchain-ai.github.io/langgraph/how-tos/memory/add-summary-conversation-history/"),)classSummarizerMixin(BaseModel):"""Mixin for summarizer."""human_prefix:str="Human"ai_prefix:str="AI"llm:BaseLanguageModelprompt:BasePromptTemplate=SUMMARY_PROMPTsummary_message_cls:type[BaseMessage]=SystemMessage
[docs]@deprecated(since="0.3.1",removal="1.0.0",message=("Please see the migration guide at: ""https://python.langchain.com/docs/versions/migrating_memory/"),)classConversationSummaryMemory(BaseChatMemory,SummarizerMixin):"""Continually summarizes the conversation history. The summary is updated after each conversation turn. The implementations returns a summary of the conversation history which can be used to provide context to the model. """buffer:str=""memory_key:str="history"#: :meta private:
    @property
    def memory_variables(self) -> list[str]:
        """Will always return list of memory variables.

        :meta private:
        """
        return [self.memory_key]
    def load_memory_variables(self, inputs: dict[str, Any]) -> dict[str, Any]:
        """Return history buffer."""
        if self.return_messages:
            buffer: Any = [self.summary_message_cls(content=self.buffer)]
        else:
            buffer = self.buffer
        return {self.memory_key: buffer}
    @pre_init
    def validate_prompt_input_variables(cls, values: dict) -> dict:
        """Validate that prompt input variables are consistent."""
        prompt_variables = values["prompt"].input_variables
        expected_keys = {"summary", "new_lines"}
        if expected_keys != set(prompt_variables):
            msg = (
                "Got unexpected prompt input variables. The prompt expects "
                f"{prompt_variables}, but it should have {expected_keys}."
            )
            raise ValueError(msg)
        return values
    def save_context(self, inputs: dict[str, Any], outputs: dict[str, str]) -> None:
        """Save context from this conversation to buffer."""
        super().save_context(inputs, outputs)
        self.buffer = self.predict_new_summary(
            self.chat_memory.messages[-2:],
            self.buffer,
        )
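

# Illustrative usage sketch: FakeListLLM stands in for a real model such as
# ChatOpenAI, so the "summary" below is just the canned response; with a real
# LLM the summary is generated from the conversation and the prior summary.
if __name__ == "__main__":
    from langchain_core.language_models import FakeListLLM

    llm = FakeListLLM(responses=["The human greets the AI and the AI responds."])
    memory = ConversationSummaryMemory(llm=llm)

    # Each save_context() call appends the turn to the chat history and then
    # refreshes the running summary from the last human/AI pair plus the
    # previous summary.
    memory.save_context({"input": "Hi there!"}, {"output": "Hello! How can I help?"})

    # load_memory_variables() returns the summary under ``memory_key``
    # ("history" by default), either as a plain string or, when
    # return_messages=True, wrapped in a SystemMessage.
    print(memory.load_memory_variables({}))  # {'history': 'The human greets ...'}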