Source code for langchain_community.chat_models.hunyuan
import json
import logging
from typing import Any, Dict, Iterator, List, Mapping, Optional, Type

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.chat_models import (
    BaseChatModel,
    generate_from_stream,
)
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    BaseMessageChunk,
    ChatMessage,
    ChatMessageChunk,
    HumanMessage,
    HumanMessageChunk,
    SystemMessage,
)
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
from langchain_core.utils import (
    convert_to_secret_str,
    get_from_dict_or_env,
    get_pydantic_field_names,
    pre_init,
)

logger = logging.getLogger(__name__)


def _convert_message_to_dict(message: BaseMessage) -> dict:
    message_dict: Dict[str, Any]
    if isinstance(message, ChatMessage):
        message_dict = {"Role": message.role, "Content": message.content}
    elif isinstance(message, SystemMessage):
        message_dict = {"Role": "system", "Content": message.content}
    elif isinstance(message, HumanMessage):
        message_dict = {"Role": "user", "Content": message.content}
    elif isinstance(message, AIMessage):
        message_dict = {"Role": "assistant", "Content": message.content}
    else:
        raise TypeError(f"Got unknown type {message}")

    return message_dict


def _convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage:
    role = _dict["Role"]
    if role == "system":
        return SystemMessage(content=_dict.get("Content", "") or "")
    elif role == "user":
        return HumanMessage(content=_dict["Content"])
    elif role == "assistant":
        return AIMessage(content=_dict.get("Content", "") or "")
    else:
        return ChatMessage(content=_dict["Content"], role=role)


def _convert_delta_to_message_chunk(
    _dict: Mapping[str, Any], default_class: Type[BaseMessageChunk]
) -> BaseMessageChunk:
    role = _dict.get("Role")
    content = _dict.get("Content") or ""

    if role == "user" or default_class == HumanMessageChunk:
        return HumanMessageChunk(content=content)
    elif role == "assistant" or default_class == AIMessageChunk:
        return AIMessageChunk(content=content)
    elif role or default_class == ChatMessageChunk:
        return ChatMessageChunk(content=content, role=role)  # type: ignore[arg-type]
    else:
        return default_class(content=content)  # type: ignore[call-arg]


def _create_chat_result(response: Mapping[str, Any]) -> ChatResult:
    generations = []
    for choice in response["Choices"]:
        message = _convert_dict_to_message(choice["Message"])
        message.id = response.get("Id", "")
        generations.append(ChatGeneration(message=message))

    token_usage = response["Usage"]
    llm_output = {"token_usage": token_usage}
    return ChatResult(generations=generations, llm_output=llm_output)
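
# Illustrative note (not part of the original module): the helpers above translate
# between LangChain message objects and the Hunyuan wire format, which uses
# capitalized "Role"/"Content" keys. For example:
#
#     _convert_message_to_dict(HumanMessage(content="Hello"))
#     # -> {"Role": "user", "Content": "Hello"}
#
#     _convert_dict_to_message({"Role": "assistant", "Content": "Hi there"})
#     # -> AIMessage(content="Hi there")
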
class ChatHunyuan(BaseChatModel):
    """Tencent Hunyuan chat models API by Tencent.

    For more information, see https://cloud.tencent.com/document/product/1729
    """

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {
            "hunyuan_app_id": "HUNYUAN_APP_ID",
            "hunyuan_secret_id": "HUNYUAN_SECRET_ID",
            "hunyuan_secret_key": "HUNYUAN_SECRET_KEY",
        }

    @property
    def lc_serializable(self) -> bool:
        return True

    hunyuan_app_id: Optional[int] = None
    """Hunyuan App ID"""
    hunyuan_secret_id: Optional[str] = None
    """Hunyuan Secret ID"""
    hunyuan_secret_key: Optional[SecretStr] = None
    """Hunyuan Secret Key"""
    streaming: bool = False
    """Whether to stream the results or not."""
    request_timeout: int = 60
    """Timeout for requests to Hunyuan API. Default is 60 seconds."""

    temperature: float = 1.0
    """What sampling temperature to use."""
    top_p: float = 1.0
    """What probability mass to use."""
    model: str = "hunyuan-lite"
    """What Model to use.
    Optional model:
    - hunyuan-lite
    - hunyuan-standard
    - hunyuan-standard-256K
    - hunyuan-pro
    - hunyuan-code
    - hunyuan-role
    - hunyuan-functioncall
    - hunyuan-vision
    """
    stream_moderation: bool = False
    """Whether to review the results or not when streaming is true."""
    enable_enhancement: bool = True
    """Whether to enhance the results or not."""

    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for API call not explicitly specified."""

    class Config:
        allow_population_by_field_name = True

    @root_validator(pre=True)
    def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Build extra kwargs from additional params that were passed in."""
        all_required_field_names = get_pydantic_field_names(cls)
        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name in extra:
                raise ValueError(f"Found {field_name} supplied twice.")
            if field_name not in all_required_field_names:
                logger.warning(
                    f"""WARNING! {field_name} is not default parameter.
                    {field_name} was transferred to model_kwargs.
                    Please confirm that {field_name} is what you intended."""
                )
                extra[field_name] = values.pop(field_name)

        invalid_model_kwargs = all_required_field_names.intersection(extra.keys())
        if invalid_model_kwargs:
            raise ValueError(
                f"Parameters {invalid_model_kwargs} should be specified explicitly. "
                f"Instead they were passed in as part of `model_kwargs` parameter."
            )

        values["model_kwargs"] = extra
        return values

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        values["hunyuan_app_id"] = get_from_dict_or_env(
            values,
            "hunyuan_app_id",
            "HUNYUAN_APP_ID",
        )
        values["hunyuan_secret_id"] = get_from_dict_or_env(
            values,
            "hunyuan_secret_id",
            "HUNYUAN_SECRET_ID",
        )
        values["hunyuan_secret_key"] = convert_to_secret_str(
            get_from_dict_or_env(
                values,
                "hunyuan_secret_key",
                "HUNYUAN_SECRET_KEY",
            )
        )
        return values

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling Hunyuan API."""
        normal_params = {
            "Temperature": self.temperature,
            "TopP": self.top_p,
            "Model": self.model,
            "Stream": self.streaming,
            "StreamModeration": self.stream_moderation,
            "EnableEnhancement": self.enable_enhancement,
        }
        return {**normal_params, **self.model_kwargs}

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._stream(
                messages=messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return generate_from_stream(stream_iter)

        res = self._chat(messages, **kwargs)
        return _create_chat_result(json.loads(res.to_json_string()))

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        res = self._chat(messages, **kwargs)

        default_chunk_class = AIMessageChunk
        for chunk in res:
            chunk = chunk.get("data", "")
            if len(chunk) == 0:
                continue
            response = json.loads(chunk)
            if "error" in response:
                raise ValueError(f"Error from Hunyuan api response: {response}")

            for choice in response["Choices"]:
                chunk = _convert_delta_to_message_chunk(
                    choice["Delta"], default_chunk_class
                )
                chunk.id = response.get("Id", "")
                default_chunk_class = chunk.__class__
                cg_chunk = ChatGenerationChunk(message=chunk)
                if run_manager:
                    run_manager.on_llm_new_token(chunk.content, chunk=cg_chunk)
                yield cg_chunk

    def _chat(self, messages: List[BaseMessage], **kwargs: Any) -> Any:
        if self.hunyuan_secret_key is None:
            raise ValueError("Hunyuan secret key is not set.")

        try:
            from tencentcloud.common import credential
            from tencentcloud.hunyuan.v20230901 import hunyuan_client, models
        except ImportError:
            raise ImportError(
                "Could not import tencentcloud python package. "
                "Please install it with `pip install tencentcloud-sdk-python`."
            )

        parameters = {**self._default_params, **kwargs}
        cred = credential.Credential(
            self.hunyuan_secret_id, str(self.hunyuan_secret_key.get_secret_value())
        )
        client = hunyuan_client.HunyuanClient(cred, "")
        req = models.ChatCompletionsRequest()
        params = {
            "Messages": [_convert_message_to_dict(m) for m in messages],
            **parameters,
        }
        req.from_json_string(json.dumps(params))
        resp = client.ChatCompletions(req)
        return resp

    @property
    def _llm_type(self) -> str:
        return "hunyuan-chat"