from __future__ import annotations

from typing import Any, Dict, List, Type

from langchain_core._api import deprecated
from langchain_core.caches import BaseCache as BaseCache  # For model_rebuild
from langchain_core.callbacks import Callbacks as Callbacks  # For model_rebuild
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.language_models import BaseLanguageModel
from langchain_core.messages import BaseMessage, SystemMessage, get_buffer_string
from langchain_core.prompts import BasePromptTemplate
from langchain_core.utils import pre_init
from pydantic import BaseModel

from langchain.chains.llm import LLMChain
from langchain.memory.chat_memory import BaseChatMemory
from langchain.memory.prompt import SUMMARY_PROMPT
[docs]@deprecated(since="0.2.12",removal="1.0",message=("Refer here for how to incorporate summaries of conversation history: ""https://langchain-ai.github.io/langgraph/how-tos/memory/add-summary-conversation-history/"# noqa: E501),)classSummarizerMixin(BaseModel):"""Mixin for summarizer."""human_prefix:str="Human"ai_prefix:str="AI"llm:BaseLanguageModelprompt:BasePromptTemplate=SUMMARY_PROMPTsummary_message_cls:Type[BaseMessage]=SystemMessage
[docs]@deprecated(since="0.3.1",removal="1.0.0",message=("Please see the migration guide at: ""https://python.langchain.com/docs/versions/migrating_memory/"),)classConversationSummaryMemory(BaseChatMemory,SummarizerMixin):"""Continually summarizes the conversation history. The summary is updated after each conversation turn. The implementations returns a summary of the conversation history which can be used to provide context to the model. """buffer:str=""memory_key:str="history"#: :meta private:
@propertydefmemory_variables(self)->List[str]:"""Will always return list of memory variables. :meta private: """return[self.memory_key]
[docs]defload_memory_variables(self,inputs:Dict[str,Any])->Dict[str,Any]:"""Return history buffer."""ifself.return_messages:buffer:Any=[self.summary_message_cls(content=self.buffer)]else:buffer=self.bufferreturn{self.memory_key:buffer}
[docs]@pre_initdefvalidate_prompt_input_variables(cls,values:Dict)->Dict:"""Validate that prompt input variables are consistent."""prompt_variables=values["prompt"].input_variablesexpected_keys={"summary","new_lines"}ifexpected_keys!=set(prompt_variables):raiseValueError("Got unexpected prompt input variables. The prompt expects "f"{prompt_variables}, but it should have {expected_keys}.")returnvalues
[docs]defsave_context(self,inputs:Dict[str,Any],outputs:Dict[str,str])->None:"""Save context from this conversation to buffer."""super().save_context(inputs,outputs)self.buffer=self.predict_new_summary(self.chat_memory.messages[-2:],self.buffer)