Source code for langchain_community.chat_models.perplexity
"""Wrapper around Perplexity APIs."""from__future__importannotationsimportloggingfromtypingimport(Any,Dict,Iterator,List,Mapping,Optional,Tuple,Type,Union,)fromlangchain_core.callbacksimportCallbackManagerForLLMRunfromlangchain_core.language_models.chat_modelsimport(BaseChatModel,generate_from_stream,)fromlangchain_core.messagesimport(AIMessage,AIMessageChunk,BaseMessage,BaseMessageChunk,ChatMessage,ChatMessageChunk,FunctionMessageChunk,HumanMessage,HumanMessageChunk,SystemMessage,SystemMessageChunk,ToolMessageChunk,)fromlangchain_core.outputsimportChatGeneration,ChatGenerationChunk,ChatResultfromlangchain_core.pydantic_v1importField,root_validatorfromlangchain_core.utilsimportget_from_dict_or_env,get_pydantic_field_nameslogger=logging.getLogger(__name__)
class ChatPerplexity(BaseChatModel):
    """`Perplexity AI` Chat models API.

    To use, you should have the ``openai`` python package installed, and the
    environment variable ``PPLX_API_KEY`` set to your API key.
    Any parameters that are valid to be passed to the openai.create call can be
    passed in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatPerplexity

            chat = ChatPerplexity(
                model="llama-3.1-sonar-small-128k-online",
                temperature=0.7,
            )
    """

    client: Any  #: :meta private:
    model: str = "llama-3.1-sonar-small-128k-online"
    """Model name."""
    temperature: float = 0.7
    """What sampling temperature to use."""
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for `create` call not explicitly specified."""
    pplx_api_key: Optional[str] = Field(None, alias="api_key")
    """Perplexity API key. Read from the ``PPLX_API_KEY`` environment variable
    if not passed in directly."""
    request_timeout: Optional[Union[float, Tuple[float, float]]] = Field(
        None, alias="timeout"
    )
    """Timeout for requests to PerplexityChat completion API. Default is None."""
    max_retries: int = 6
    """Maximum number of retries to make when generating."""
    streaming: bool = False
    """Whether to stream the results or not."""
    max_tokens: Optional[int] = None
    """Maximum number of tokens to generate."""

    class Config:
        allow_population_by_field_name = True

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"pplx_api_key": "PPLX_API_KEY"}

    @root_validator(pre=True)
    def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Build extra kwargs from additional params that were passed in."""
        all_required_field_names = get_pydantic_field_names(cls)
        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name in extra:
                raise ValueError(f"Found {field_name} supplied twice.")
            if field_name not in all_required_field_names:
                logger.warning(
                    f"""WARNING! {field_name} is not a default parameter.
                    {field_name} was transferred to model_kwargs.
                    Please confirm that {field_name} is what you intended."""
                )
                extra[field_name] = values.pop(field_name)

        invalid_model_kwargs = all_required_field_names.intersection(extra.keys())
        if invalid_model_kwargs:
            raise ValueError(
                f"Parameters {invalid_model_kwargs} should be specified explicitly. "
                f"Instead they were passed in as part of `model_kwargs` parameter."
            )

        values["model_kwargs"] = extra
        return values

    @root_validator(pre=False, skip_on_failure=True)
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        values["pplx_api_key"] = get_from_dict_or_env(
            values, "pplx_api_key", "PPLX_API_KEY"
        )
        try:
            import openai
        except ImportError:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )
        try:
            values["client"] = openai.OpenAI(
                api_key=values["pplx_api_key"], base_url="https://api.perplexity.ai"
            )
        except AttributeError:
            raise ValueError(
                "`openai` has no `ChatCompletion` attribute, this is likely "
                "due to an old version of the openai package. Try upgrading it "
                "with `pip install --upgrade openai`."
            )
        return values
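    # Editor's note (illustrative, not in the upstream source): because
    # `build_extra` sweeps any unrecognized constructor kwarg into
    # `model_kwargs`, provider parameters without a dedicated field still reach
    # the API call via `_default_params`. For example (`top_p` is an assumed
    # pass-through parameter, not a field of this class):
    #
    #     chat = ChatPerplexity(model="llama-3.1-sonar-small-128k-online", top_p=0.9)
    #     chat.model_kwargs  # -> {"top_p": 0.9}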
    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling PerplexityChat API."""
        return {
            "request_timeout": self.request_timeout,
            "max_tokens": self.max_tokens,
            "stream": self.streaming,
            "temperature": self.temperature,
            **self.model_kwargs,
        }

    def _convert_message_to_dict(self, message: BaseMessage) -> Dict[str, Any]:
        if isinstance(message, ChatMessage):
            message_dict = {"role": message.role, "content": message.content}
        elif isinstance(message, SystemMessage):
            message_dict = {"role": "system", "content": message.content}
        elif isinstance(message, HumanMessage):
            message_dict = {"role": "user", "content": message.content}
        elif isinstance(message, AIMessage):
            message_dict = {"role": "assistant", "content": message.content}
        else:
            raise TypeError(f"Got unknown type {message}")
        return message_dict

    def _create_message_dicts(
        self, messages: List[BaseMessage], stop: Optional[List[str]]
    ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
        params = dict(self._invocation_params)
        if stop is not None:
            if "stop" in params:
                raise ValueError("`stop` found in both the input and default params.")
            params["stop"] = stop
        message_dicts = [self._convert_message_to_dict(m) for m in messages]
        return message_dicts, params

    def _convert_delta_to_message_chunk(
        self, _dict: Mapping[str, Any], default_class: Type[BaseMessageChunk]
    ) -> BaseMessageChunk:
        role = _dict.get("role")
        content = _dict.get("content") or ""
        additional_kwargs: Dict = {}
        if _dict.get("function_call"):
            function_call = dict(_dict["function_call"])
            if "name" in function_call and function_call["name"] is None:
                function_call["name"] = ""
            additional_kwargs["function_call"] = function_call
        if _dict.get("tool_calls"):
            additional_kwargs["tool_calls"] = _dict["tool_calls"]

        if role == "user" or default_class == HumanMessageChunk:
            return HumanMessageChunk(content=content)
        elif role == "assistant" or default_class == AIMessageChunk:
            return AIMessageChunk(content=content, additional_kwargs=additional_kwargs)
        elif role == "system" or default_class == SystemMessageChunk:
            return SystemMessageChunk(content=content)
        elif role == "function" or default_class == FunctionMessageChunk:
            return FunctionMessageChunk(content=content, name=_dict["name"])
        elif role == "tool" or default_class == ToolMessageChunk:
            return ToolMessageChunk(
                content=content, tool_call_id=_dict["tool_call_id"]
            )
        elif role or default_class == ChatMessageChunk:
            return ChatMessageChunk(content=content, role=role)  # type: ignore[arg-type]
        else:
            return default_class(content=content)  # type: ignore[call-arg]
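    # Editor's note (illustrative, not in the upstream source): the converters
    # above translate between LangChain message objects and the OpenAI-style
    # role/content dicts the Perplexity endpoint expects, e.g.:
    #
    #     self._convert_message_to_dict(HumanMessage(content="Hi"))
    #     # -> {"role": "user", "content": "Hi"}
    #
    #     self._convert_delta_to_message_chunk(
    #         {"role": "assistant", "content": "He"}, AIMessageChunk
    #     )
    #     # -> AIMessageChunk(content="He")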
    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs}
        default_chunk_class = AIMessageChunk

        if stop:
            params["stop_sequences"] = stop
        stream_resp = self.client.chat.completions.create(
            model=params["model"], messages=message_dicts, stream=True
        )
        for chunk in stream_resp:
            if not isinstance(chunk, dict):
                chunk = chunk.dict()
            if len(chunk["choices"]) == 0:
                continue
            choice = chunk["choices"][0]
            chunk = self._convert_delta_to_message_chunk(
                choice["delta"], default_chunk_class
            )
            finish_reason = choice.get("finish_reason")
            generation_info = (
                dict(finish_reason=finish_reason)
                if finish_reason is not None
                else None
            )
            default_chunk_class = chunk.__class__
            chunk = ChatGenerationChunk(message=chunk, generation_info=generation_info)
            if run_manager:
                run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._stream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            if stream_iter:
                return generate_from_stream(stream_iter)
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs}
        response = self.client.chat.completions.create(
            model=params["model"], messages=message_dicts
        )
        message = AIMessage(content=response.choices[0].message.content)
        return ChatResult(generations=[ChatGeneration(message=message)])

    @property
    def _invocation_params(self) -> Mapping[str, Any]:
        """Get the parameters used to invoke the model."""
        pplx_creds: Dict[str, Any] = {
            "api_key": self.pplx_api_key,
            "api_base": "https://api.perplexity.ai",
            "model": self.model,
        }
        return {**pplx_creds, **self._default_params}

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "perplexitychat"
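A minimal usage sketch of the class above (not part of the module source), assuming the ``openai`` package is installed and ``PPLX_API_KEY`` is set in the environment; the prompts are placeholders:

    from langchain_community.chat_models import ChatPerplexity

    chat = ChatPerplexity(
        model="llama-3.1-sonar-small-128k-online",
        temperature=0.7,
    )

    # Blocking call: BaseChatModel.invoke dispatches to _generate above.
    result = chat.invoke("Name the tallest mountain on Earth.")
    print(result.content)

    # Incremental tokens: BaseChatModel.stream dispatches to _stream above.
    for chunk in chat.stream("List three major rivers."):
        print(chunk.content, end="", flush=True)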