# Source code for langchain_community.callbacks.context_callback
"""Callback handler for Context AI"""

import os
from typing import Any, Dict, List
from uuid import UUID

from langchain_core.callbacks import BaseCallbackHandler
from langchain_core.messages import BaseMessage
from langchain_core.outputs import LLMResult
from langchain_core.utils import guard_import
def import_context() -> Any:
    """Import the `getcontext` package.

    Returns:
        A tuple of (``getcontext`` module, ``Credential``, ``Conversation``,
        ``Message``, ``MessageRole``, ``Rating``) pulled from the
        `getcontext` package.

    Raises:
        ImportError: If the `python-context` package is not installed.
    """
    # Bind each module once; Python's import cache makes the repeated
    # guard_import calls in the original equivalent to these lookups.
    getcontext = guard_import("getcontext", pip_name="python-context")
    token_mod = guard_import("getcontext.token", pip_name="python-context")
    models = guard_import(
        "getcontext.generated.models", pip_name="python-context"
    )
    return (
        getcontext,
        token_mod.Credential,
        models.Conversation,
        models.Message,
        models.MessageRole,
        models.Rating,
    )
class ContextCallbackHandler(BaseCallbackHandler):
    """Callback Handler that records transcripts to the Context service.

    (https://context.ai).

    Keyword Args:
        token (optional): The token with which to authenticate requests to
            Context. Visit https://with.context.ai/settings to generate a
            token. If not provided, the value of the `CONTEXT_TOKEN`
            environment variable will be used.

    Raises:
        ImportError: if the `context-python` package is not installed.

    Chat Example:
        >>> from langchain_community.llms import ChatOpenAI
        >>> from langchain_community.callbacks import ContextCallbackHandler
        >>> context_callback = ContextCallbackHandler(
        ...     token="<CONTEXT_TOKEN_HERE>",
        ... )
        >>> chat = ChatOpenAI(
        ...     temperature=0,
        ...     headers={"user_id": "123"},
        ...     callbacks=[context_callback],
        ...     openai_api_key="API_KEY_HERE",
        ... )
        >>> messages = [
        ...     SystemMessage(content="You translate English to French."),
        ...     HumanMessage(content="I love programming with LangChain."),
        ... ]
        >>> chat.invoke(messages)

    Chain Example:
        >>> from langchain.chains import LLMChain
        >>> from langchain_community.chat_models import ChatOpenAI
        >>> from langchain_community.callbacks import ContextCallbackHandler
        >>> context_callback = ContextCallbackHandler(
        ...     token="<CONTEXT_TOKEN_HERE>",
        ... )
        >>> human_message_prompt = HumanMessagePromptTemplate(
        ...     prompt=PromptTemplate(
        ...         template="What is a good name for a company that makes {product}?",
        ...         input_variables=["product"],
        ...     ),
        ... )
        >>> chat_prompt_template = ChatPromptTemplate.from_messages(
        ...     [human_message_prompt]
        ... )
        >>> callback = ContextCallbackHandler(token)
        >>> # Note: the same callback object must be shared between the
        ...   LLM and the chain.
        >>> chat = ChatOpenAI(temperature=0.9, callbacks=[callback])
        >>> chain = LLMChain(
        ...     llm=chat,
        ...     prompt=chat_prompt_template,
        ...     callbacks=[callback]
        ... )
        >>> chain.run("colorful socks")
    """
[docs]defon_chat_model_start(self,serialized:Dict[str,Any],messages:List[List[BaseMessage]],*,run_id:UUID,**kwargs:Any,)->Any:"""Run when the chat model is started."""llm_model=kwargs.get("invocation_params",{}).get("model",None)ifllm_modelisnotNone:self.metadata["model"]=llm_modeliflen(messages)==0:returnformessageinmessages[0]:role=self.message_role_model.SYSTEMifmessage.type=="human":role=self.message_role_model.USERelifmessage.type=="system":role=self.message_role_model.SYSTEMelifmessage.type=="ai":role=self.message_role_model.ASSISTANTself.messages.append(self.message_model(message=message.content,role=role,))
[docs]defon_llm_end(self,response:LLMResult,**kwargs:Any)->None:"""Run when LLM ends."""iflen(response.generations)==0orlen(response.generations[0])==0:returnifnotself.chain_run_id:generation=response.generations[0][0]self.messages.append(self.message_model(message=generation.text,role=self.message_role_model.ASSISTANT,))self._log_conversation()
[docs]defon_chain_start(self,serialized:Dict[str,Any],inputs:Dict[str,Any],**kwargs:Any)->None:"""Run when chain starts."""self.chain_run_id=kwargs.get("run_id",None)
[docs]defon_chain_end(self,outputs:Dict[str,Any],**kwargs:Any)->None:"""Run when chain ends."""self.messages.append(self.message_model(message=outputs["text"],role=self.message_role_model.ASSISTANT,))self._log_conversation()self.chain_run_id=None
def_log_conversation(self)->None:"""Log the conversation to the context API."""iflen(self.messages)==0:returnself.client.log.conversation_upsert(body={"conversation":self.conversation_model(messages=self.messages,metadata=self.metadata,)})self.messages=[]self.metadata={}