@deprecated(
    since="0.2.12",
    removal="1.0",
    message=(
        "Refer here for how to incorporate summaries of conversation history: "
        "https://langchain-ai.github.io/langgraph/how-tos/memory/add-summary-conversation-history/"  # noqa: E501
    ),
)
class SummarizerMixin(BaseModel):
    """Mixin holding the configuration used to summarize conversation history.

    Deprecated since 0.2.12 (scheduled for removal in 1.0) in favor of the
    LangGraph summary-conversation-history approach linked above.
    """

    # Speaker label for human turns when conversation lines are formatted.
    human_prefix: str = "Human"
    # Speaker label for AI turns when conversation lines are formatted.
    ai_prefix: str = "AI"
    # Language model used to produce the running summary (required field).
    llm: BaseLanguageModel
    # Prompt fed to `llm`; a validator elsewhere in this file requires it to
    # declare exactly the "summary" and "new_lines" input variables.
    prompt: BasePromptTemplate = SUMMARY_PROMPT
    # Message class used to wrap the summary text when messages are returned.
    summary_message_cls: Type[BaseMessage] = SystemMessage
@property
def memory_variables(self) -> List[str]:
    """Will always return list of memory variables.

    :meta private:
    """
    keys: List[str] = [self.memory_key]
    return keys
def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
    """Return history buffer.

    The running summary is returned either as plain text or, when
    ``return_messages`` is set, wrapped in a single ``summary_message_cls``
    message. ``inputs`` is accepted for interface compatibility but unused.
    """
    # Default to the raw text buffer; override with the message form below.
    history: Any = self.buffer
    if self.return_messages:
        history = [self.summary_message_cls(content=self.buffer)]
    return {self.memory_key: history}
@pre_init
def validate_prompt_input_variables(cls, values: Dict) -> Dict:
    """Validate that prompt input variables are consistent.

    Raises:
        ValueError: if the configured prompt does not declare exactly the
            ``summary`` and ``new_lines`` input variables.
    """
    prompt_variables = values["prompt"].input_variables
    expected_keys = {"summary", "new_lines"}
    # Compare as sets so declaration order in the prompt does not matter.
    if set(prompt_variables) != expected_keys:
        raise ValueError(
            "Got unexpected prompt input variables. The prompt expects "
            f"{prompt_variables}, but it should have {expected_keys}."
        )
    return values
def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None:
    """Save context from this conversation to buffer.

    Delegates message storage to the parent class, then refreshes the
    running summary from the two most recently stored messages.
    """
    super().save_context(inputs, outputs)
    # The parent call above appends the new human/AI pair, so the last two
    # messages are exactly this exchange.
    latest_exchange = self.chat_memory.messages[-2:]
    self.buffer = self.predict_new_summary(latest_exchange, self.buffer)