class BaichuanLLM(LLM):
    """Baichuan large language models.

    Completion-style wrapper around the Baichuan chat API: a single user
    message is sent per call and the first choice's message content is
    returned as plain text.
    """

    # TODO: Add streaming support.

    model: str = "Baichuan2-Turbo-192k"
    """ Other models are available at https://platform.baichuan-ai.com/docs/api. """
    temperature: float = 0.3
    top_p: float = 0.95
    timeout: int = 60
    # Extra keyword arguments merged into every request payload.
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    baichuan_api_host: Optional[str] = None
    baichuan_api_key: Optional[SecretStr] = None

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Default request parameters, with ``model_kwargs`` merged in last.

        Keys in ``model_kwargs`` deliberately override the named fields.
        """
        return {
            "model": self.model,
            "temperature": self.temperature,
            "top_p": self.top_p,
            **self.model_kwargs,
        }

    def _post(self, request: Any) -> Any:
        """POST ``request`` to the Baichuan API and return the completion text.

        Args:
            request: JSON-serializable request payload (model params + messages).

        Returns:
            The ``choices[0].message.content`` string from a 200 response.

        Raises:
            ValueError: wrapping any network or HTTP error, with the original
                exception chained as the cause.
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.baichuan_api_key.get_secret_value()}",  # type: ignore[union-attr]
        }
        try:
            response = requests.post(
                self.baichuan_api_host,  # type: ignore[arg-type]
                headers=headers,
                json=request,
                timeout=self.timeout,
            )
            if response.status_code == 200:
                parsed_json = json.loads(response.text)
                return parsed_json["choices"][0]["message"]["content"]
            else:
                # Non-200: surface the HTTP error; it is re-wrapped below.
                response.raise_for_status()
        except Exception as e:
            # Chain the original exception so the root cause (timeout,
            # connection failure, HTTP status) is not lost.
            raise ValueError(f"An error has occurred: {e}") from e

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Run the LLM on ``prompt`` and return the generated text.

        ``kwargs`` override the default request parameters; ``stop`` tokens
        are enforced on the returned text client-side.
        """
        request = self._default_params
        request["messages"] = [{"role": "user", "content": prompt}]
        request.update(kwargs)
        text = self._post(request)
        if stop is not None:
            text = enforce_stop_tokens(text, stop)
        return text

    @property
    def _llm_type(self) -> str:
        """Return type of chat_model."""
        return "baichuan-llm"