Source code for langchain_experimental.llms.llamaapi
import json
import logging
from typing import (
    Any,
    Dict,
    List,
    Mapping,
    Optional,
    Tuple,
)

from langchain.schema import (
    ChatGeneration,
    ChatResult,
)
from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import (
    AIMessage,
    BaseMessage,
    ChatMessage,
    FunctionMessage,
    HumanMessage,
    SystemMessage,
)

logger = logging.getLogger(__name__)


def _convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage:
    role = _dict["role"]
    if role == "user":
        return HumanMessage(content=_dict["content"])
    elif role == "assistant":
        # Fix for azure
        # Also OpenAI returns None for tool invocations
        content = _dict.get("content") or ""
        if _dict.get("function_call"):
            _dict["function_call"]["arguments"] = json.dumps(
                _dict["function_call"]["arguments"]
            )
            additional_kwargs = {"function_call": dict(_dict["function_call"])}
        else:
            additional_kwargs = {}
        return AIMessage(content=content, additional_kwargs=additional_kwargs)
    elif role == "system":
        return SystemMessage(content=_dict["content"])
    elif role == "function":
        return FunctionMessage(content=_dict["content"], name=_dict["name"])
    else:
        return ChatMessage(content=_dict["content"], role=role)


def _convert_message_to_dict(message: BaseMessage) -> dict:
    if isinstance(message, ChatMessage):
        message_dict = {"role": message.role, "content": message.content}
    elif isinstance(message, HumanMessage):
        message_dict = {"role": "user", "content": message.content}
    elif isinstance(message, AIMessage):
        message_dict = {"role": "assistant", "content": message.content}
        if "function_call" in message.additional_kwargs:
            message_dict["function_call"] = message.additional_kwargs["function_call"]
    elif isinstance(message, SystemMessage):
        message_dict = {"role": "system", "content": message.content}
    elif isinstance(message, FunctionMessage):
        message_dict = {
            "role": "function",
            "content": message.content,
            "name": message.name,
        }
    else:
        raise ValueError(f"Got unknown type {message}")
    if "name" in message.additional_kwargs:
        message_dict["name"] = message.additional_kwargs["name"]
    return message_dict
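
# Illustrative sketch, not part of the original module: a quick check of the
# two converters above. The function name is hypothetical, and the sample
# dicts assume the OpenAI-style chat format these helpers are written against.
def _demo_message_round_trip() -> None:
    # A plain user dict converts to a HumanMessage and back unchanged.
    msg = _convert_dict_to_message({"role": "user", "content": "Hi there"})
    assert isinstance(msg, HumanMessage)
    assert _convert_message_to_dict(msg) == {"role": "user", "content": "Hi there"}

    # An assistant dict carrying a function call has its arguments JSON-encoded
    # on the way in, and its None content normalized to an empty string.
    ai = _convert_dict_to_message(
        {
            "role": "assistant",
            "content": None,
            "function_call": {"name": "get_weather", "arguments": {"city": "Paris"}},
        }
    )
    assert ai.content == ""
    assert ai.additional_kwargs["function_call"]["arguments"] == '{"city": "Paris"}'
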
class ChatLlamaAPI(BaseChatModel):
    """Chat model using the Llama API."""

    client: Any  #: :meta private:

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        message_dicts, params = self._create_message_dicts(messages, stop)
        _params = {"messages": message_dicts}
        final_params = {**params, **kwargs, **_params}
        response = self.client.run(final_params).json()
        return self._create_chat_result(response)

    def _create_message_dicts(
        self, messages: List[BaseMessage], stop: Optional[List[str]]
    ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
        params = dict(self._client_params)
        if stop is not None:
            if "stop" in params:
                raise ValueError("`stop` found in both the input and default params.")
            params["stop"] = stop
        message_dicts = [_convert_message_to_dict(m) for m in messages]
        return message_dicts, params

    def _create_chat_result(self, response: Mapping[str, Any]) -> ChatResult:
        generations = []
        for res in response["choices"]:
            message = _convert_dict_to_message(res["message"])
            gen = ChatGeneration(
                message=message,
                generation_info=dict(finish_reason=res.get("finish_reason")),
            )
            generations.append(gen)
        return ChatResult(generations=generations)

    @property
    def _client_params(self) -> Mapping[str, Any]:
        """Get the parameters used for the client."""
        return {}

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "llama-api"
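
# Usage sketch, not part of the original module. It assumes the third-party
# ``llamaapi`` package, whose client object exposes the ``run()`` method that
# ``_generate`` above relies on; the API token is a placeholder.
if __name__ == "__main__":
    from llamaapi import LlamaAPI

    llama = LlamaAPI("<your-api-token>")  # placeholder token
    chat = ChatLlamaAPI(client=llama)
    # ``invoke`` is inherited from BaseChatModel and drives ``_generate``.
    print(chat.invoke("Tell me a joke about llamas").content)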