def get_role(message: BaseMessage) -> str:
    """Get role of the message.

    Args:
        message (BaseMessage): The message object.

    Raises:
        ValueError: Raised when the message is of an unknown type.

    Returns:
        str: The role of the message.
    """
    if isinstance(message, ChatMessage) or isinstance(message, HumanMessage):
        return "user"
    if isinstance(message, AIMessage):
        return "assistant"
    if isinstance(message, SystemMessage):
        return "system"
    raise ValueError(f"Got unknown type {message}")
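# Illustrative sketch (not part of the module source): how ``get_role`` maps
# LangChain message types onto Friendli chat roles. The helper name below is
# hypothetical and exists only for this example.
def _example_get_role() -> None:
    from langchain_core.messages import AIMessage, HumanMessage, SystemMessage

    assert get_role(HumanMessage(content="Hi")) == "user"
    assert get_role(AIMessage(content="Hello!")) == "assistant"
    assert get_role(SystemMessage(content="Be brief.")) == "system"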
def get_chat_request(messages: List[BaseMessage]) -> Dict[str, Any]:
    """Get a request of the Friendli chat API.

    Args:
        messages (List[BaseMessage]): Messages comprising the conversation so far.

    Returns:
        Dict[str, Any]: The request for the Friendli chat API.
    """
    return {
        "messages": [
            {"role": get_role(message), "content": message.content}
            for message in messages
        ]
    }
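# Illustrative sketch (not part of the module source): the request payload that
# ``get_chat_request`` builds for a short conversation. The helper name below is
# hypothetical and exists only for this example.
def _example_get_chat_request() -> None:
    from langchain_core.messages import HumanMessage, SystemMessage

    request = get_chat_request(
        [
            SystemMessage(content="You are a helpful assistant."),
            HumanMessage(content="What is generative AI?"),
        ]
    )
    assert request == {
        "messages": [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "What is generative AI?"},
        ]
    }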
class ChatFriendli(BaseChatModel, BaseFriendli):
    """Friendli LLM for chat.

    The ``friendli-client`` package should be installed with
    ``pip install friendli-client``. You must set the ``FRIENDLI_TOKEN`` environment
    variable or provide the value of your personal access token via the
    ``friendli_token`` argument.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatFriendli

            chat = ChatFriendli(
                model="llama-2-13b-chat", friendli_token="YOUR FRIENDLI TOKEN"
            )
            chat.invoke("What is generative AI?")
    """

    model: str = "llama-2-13b-chat"

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"friendli_token": "FRIENDLI_TOKEN"}

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling Friendli completions API."""
        return {
            "frequency_penalty": self.frequency_penalty,
            "presence_penalty": self.presence_penalty,
            "max_tokens": self.max_tokens,
            "stop": self.stop,
            "temperature": self.temperature,
            "top_p": self.top_p,
        }

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Get the identifying parameters."""
        return {"model": self.model, **self._default_params}

    @property
    def _llm_type(self) -> str:
        return "friendli-chat"

    def _get_invocation_params(
        self, stop: Optional[List[str]] = None, **kwargs: Any
    ) -> Dict[str, Any]:
        """Get the parameters used to invoke the model."""
        params = self._default_params
        if self.stop is not None and stop is not None:
            raise ValueError("`stop` found in both the input and default params.")
        elif self.stop is not None:
            params["stop"] = self.stop
        else:
            params["stop"] = stop
        return {**params, **kwargs}

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        params = self._get_invocation_params(stop=stop, **kwargs)
        stream = self.client.chat.completions.create(
            **get_chat_request(messages), stream=True, model=self.model, **params
        )
        for chunk in stream:
            delta = chunk.choices[0].delta.content
            if delta:
                if run_manager:
                    run_manager.on_llm_new_token(delta)
                yield ChatGenerationChunk(message=AIMessageChunk(content=delta))

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatGenerationChunk]:
        params = self._get_invocation_params(stop=stop, **kwargs)
        stream = await self.async_client.chat.completions.create(
            **get_chat_request(messages), stream=True, model=self.model, **params
        )
        async for chunk in stream:
            delta = chunk.choices[0].delta.content
            if delta:
                if run_manager:
                    await run_manager.on_llm_new_token(delta)
                yield ChatGenerationChunk(message=AIMessageChunk(content=delta))

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._stream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return generate_from_stream(stream_iter)

        params = self._get_invocation_params(stop=stop, **kwargs)
        response = self.client.chat.completions.create(
            messages=[
                {
                    "role": get_role(message),
                    "content": message.content,
                }
                for message in messages
            ],
            stream=False,
            model=self.model,
            **params,
        )
        message = AIMessage(content=response.choices[0].message.content)
        return ChatResult(generations=[ChatGeneration(message=message)])

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._astream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return await agenerate_from_stream(stream_iter)

        params = self._get_invocation_params(stop=stop, **kwargs)
        response = await self.async_client.chat.completions.create(
            messages=[
                {
                    "role": get_role(message),
                    "content": message.content,
                }
                for message in messages
            ],
            stream=False,
            model=self.model,
            **params,
        )
        message = AIMessage(content=response.choices[0].message.content)
        return ChatResult(generations=[ChatGeneration(message=message)])
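# Illustrative sketch (not part of the module source): basic synchronous usage of
# ``ChatFriendli``, mirroring the docstring example. It assumes ``FRIENDLI_TOKEN``
# is set in the environment (or passed via ``friendli_token``); the helper name
# below is hypothetical and exists only for this example.
def _example_chat_friendli() -> None:
    chat = ChatFriendli(model="llama-2-13b-chat")

    # Single call: returns an AIMessage containing the full completion.
    print(chat.invoke("What is generative AI?").content)

    # Streaming call: yields message chunks as tokens arrive.
    for chunk in chat.stream("What is generative AI?"):
        print(chunk.content, end="", flush=True)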