# Source code for langchain_community.chat_models.naver
import logging
from typing import (
    Any,
    AsyncContextManager,
    AsyncIterator,
    Callable,
    Dict,
    Iterator,
    List,
    Optional,
    Tuple,
    Type,
    Union,
    cast,
)

import httpx
from httpx_sse import SSEError
from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.chat_models import BaseChatModel, LangSmithParams
from langchain_core.language_models.llms import create_base_retry_decorator
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    BaseMessageChunk,
    ChatMessage,
    ChatMessageChunk,
    HumanMessage,
    HumanMessageChunk,
    SystemMessage,
    SystemMessageChunk,
)
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
from langchain_core.utils import convert_to_secret_str, get_from_env
from pydantic import (
    AliasChoices,
    ConfigDict,
    Field,
    SecretStr,
    model_validator,
)
from typing_extensions import Self

_DEFAULT_BASE_URL = "https://clovastudio.stream.ntruss.com"

logger = logging.getLogger(__name__)


def _convert_chunk_to_message_chunk(
    sse: Any, default_class: Type[BaseMessageChunk]
) -> BaseMessageChunk:
    """Convert one server-sent event into the matching message-chunk type.

    The terminal ``result`` event carries usage/stop metadata instead of
    content, so it is mapped to an empty ``AIMessageChunk`` whose
    ``response_metadata`` holds the translated result fields.
    """
    sse_data = sse.json()
    if sse.event == "result":
        response_metadata = _sse_data_to_response_metadata(sse_data)
        return AIMessageChunk(content="", response_metadata=response_metadata)
    message = sse_data.get("message")
    role = message.get("role")
    content = message.get("content") or ""
    if role == "user" or default_class == HumanMessageChunk:
        return HumanMessageChunk(content=content)
    elif role == "assistant" or default_class == AIMessageChunk:
        return AIMessageChunk(content=content)
    elif role == "system" or default_class == SystemMessageChunk:
        return SystemMessageChunk(content=content)
    elif role or default_class == ChatMessageChunk:
        return ChatMessageChunk(content=content, role=role)
    else:
        return default_class(content=content)  # type: ignore[call-arg]


def _sse_data_to_response_metadata(sse_data: Dict) -> Dict[str, Any]:
    """Map the API's camelCase result fields to snake_case response metadata.

    Only keys actually present in ``sse_data`` are copied.
    """
    response_metadata: Dict[str, Any] = {}
    if "stopReason" in sse_data:
        response_metadata["stop_reason"] = sse_data["stopReason"]
    if "inputLength" in sse_data:
        response_metadata["input_length"] = sse_data["inputLength"]
    if "outputLength" in sse_data:
        response_metadata["output_length"] = sse_data["outputLength"]
    if "seed" in sse_data:
        response_metadata["seed"] = sse_data["seed"]
    if "aiFilter" in sse_data:
        response_metadata["ai_filter"] = sse_data["aiFilter"]
    return response_metadata


def _convert_message_to_naver_chat_message(
    message: BaseMessage,
) -> Dict:
    """Convert a LangChain message into the request dict the Clova API expects.

    Raises:
        ValueError: For message types the API does not support
            (e.g. FunctionMessage, ToolMessage).
    """
    if isinstance(message, ChatMessage):
        return dict(role=message.role, content=message.content)
    elif isinstance(message, HumanMessage):
        return dict(role="user", content=message.content)
    elif isinstance(message, SystemMessage):
        return dict(role="system", content=message.content)
    elif isinstance(message, AIMessage):
        return dict(role="assistant", content=message.content)
    else:
        logger.warning(
            "FunctionMessage, ToolMessage not yet supported "
            "(https://api.ncloud-docs.com/docs/clovastudio-chatcompletions)"
        )
        raise ValueError(f"Got unknown type {message}")


def _convert_naver_chat_message_to_message(
    _message: Dict,
) -> BaseMessage:
    """Convert an API response message dict into a LangChain message.

    Raises:
        ValueError: If the role is not one of "assistant", "system", "user".
    """
    role = _message["role"]
    content = cast(str, _message["content"])
    additional_kwargs: Dict = {}
    if role == "user":
        return HumanMessage(
            content=content,
            additional_kwargs=additional_kwargs,
        )
    elif role == "system":
        return SystemMessage(
            content=content,
            additional_kwargs=additional_kwargs,
        )
    elif role == "assistant":
        return AIMessage(
            content=content,
            additional_kwargs=additional_kwargs,
        )
    else:
        # FIX: an `assert role in (...)` previously guarded this function; it
        # was stripped under `python -O` and made this branch unreachable.
        # Relying on the explicit ValueError keeps the check in all runs.
        logger.warning("Got unknown role %s", role)
        raise ValueError(f"Got unknown role {role}")


async def _aiter_sse(
    event_source_mgr: AsyncContextManager[Any],
) -> AsyncIterator[Dict]:
    """Iterate over the server-sent events.

    Stops cleanly on the ``signal``/``[DONE]`` sentinel and raises
    ``SSEError`` on an ``error`` event.
    """
    async with event_source_mgr as event_source:
        await _araise_on_error(event_source.response)
        async for sse in event_source.aiter_sse():
            event_data = sse.json()
            if sse.event == "signal" and event_data.get("data", {}) == "[DONE]":
                return
            if sse.event == "error":
                raise SSEError(message=sse.data)
            yield sse


def _raise_on_error(response: httpx.Response) -> None:
    """Raise an error if the response is an error."""
    if httpx.codes.is_error(response.status_code):
        error_message = response.read().decode("utf-8")
        raise httpx.HTTPStatusError(
            f"Error response {response.status_code} "
            f"while fetching {response.url}: {error_message}",
            request=response.request,
            response=response,
        )


async def _araise_on_error(response: httpx.Response) -> None:
    """Raise an error if the response is an error (async variant)."""
    if httpx.codes.is_error(response.status_code):
        error_message = (await response.aread()).decode("utf-8")
        raise httpx.HTTPStatusError(
            f"Error response {response.status_code} "
            f"while fetching {response.url}: {error_message}",
            request=response.request,
            response=response,
        )
class ChatClovaX(BaseChatModel):
    """`NCP ClovaStudio` Chat Completion API.

    following environment variables set or passed in constructor in lower case:
    - ``NCP_CLOVASTUDIO_API_KEY``
    - ``NCP_APIGW_API_KEY``

    Example:
        .. code-block:: python

            from langchain_core.messages import HumanMessage
            from langchain_community import ChatClovaX

            model = ChatClovaX()
            model.invoke([HumanMessage(content="Come up with 10 names for a song about parrots.")])
    """  # noqa: E501

    client: Optional[httpx.Client] = Field(default=None)  #: :meta private:
    async_client: Optional[httpx.AsyncClient] = Field(default=None)  #: :meta private:

    model_name: str = Field(
        default="HCX-003",
        validation_alias=AliasChoices("model_name", "model"),
        description="NCP ClovaStudio chat model name",
    )
    task_id: Optional[str] = Field(
        default=None, description="NCP Clova Studio chat model tuning task ID"
    )
    service_app: bool = Field(
        default=False,
        description="false: use testapp, true: use service app on NCP Clova Studio",
    )

    ncp_clovastudio_api_key: Optional[SecretStr] = Field(default=None, alias="api_key")
    """Automatically inferred from env are `NCP_CLOVASTUDIO_API_KEY` if not provided."""

    ncp_apigw_api_key: Optional[SecretStr] = Field(default=None, alias="apigw_api_key")
    """Automatically inferred from env are `NCP_APIGW_API_KEY` if not provided."""

    base_url: str = Field(default="", alias="base_url")
    """
    Automatically inferred from env are `NCP_CLOVASTUDIO_API_BASE_URL` if not provided.
    """

    temperature: Optional[float] = Field(gt=0.0, le=1.0, default=0.5)
    top_k: Optional[int] = Field(ge=0, le=128, default=0)
    top_p: Optional[float] = Field(ge=0, le=1.0, default=0.8)
    repeat_penalty: Optional[float] = Field(gt=0.0, le=10, default=5.0)
    max_tokens: Optional[int] = Field(ge=0, le=4096, default=100)
    stop_before: Optional[list[str]] = Field(default=None, alias="stop")
    include_ai_filters: Optional[bool] = Field(default=False)
    seed: Optional[int] = Field(ge=0, le=4294967295, default=0)

    timeout: int = Field(gt=0, default=90)
    max_retries: int = Field(ge=1, default=2)

    model_config = ConfigDict(populate_by_name=True, protected_namespaces=())

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling the API.

        Returns a fresh dict each call with camelCase API keys; keys whose
        value is None are dropped so they never reach the request body.
        """
        defaults = {
            "temperature": self.temperature,
            "topK": self.top_k,
            "topP": self.top_p,
            "repeatPenalty": self.repeat_penalty,
            "maxTokens": self.max_tokens,
            "stopBefore": self.stop_before,
            "includeAiFilters": self.include_ai_filters,
            "seed": self.seed,
        }
        filtered = {k: v for k, v in defaults.items() if v is not None}
        return filtered

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Get the identifying parameters.

        FIX: previously the model name was set on one fresh dict produced by
        the `_default_params` property and then a *second* fresh dict was
        returned, so the result never contained "model_name". Bind the dict
        once and return it.
        """
        params = self._default_params
        params["model_name"] = self.model_name
        return params

    @property
    def lc_secrets(self) -> Dict[str, str]:
        # NOTE(review): the old-key path exposes only the studio key while the
        # new-key path exposes both, which looks inverted relative to
        # default_headers() (old keys send two headers) — confirm intent.
        if not self._is_new_api_key():
            return {
                "ncp_clovastudio_api_key": "NCP_CLOVASTUDIO_API_KEY",
            }
        else:
            return {
                "ncp_clovastudio_api_key": "NCP_CLOVASTUDIO_API_KEY",
                "ncp_apigw_api_key": "NCP_APIGW_API_KEY",
            }

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "chat-naver"

    def _get_ls_params(
        self, stop: Optional[List[str]] = None, **kwargs: Any
    ) -> LangSmithParams:
        """Get the parameters used to invoke the model."""
        params = super()._get_ls_params(stop=stop, **kwargs)
        params["ls_provider"] = "naver"
        return params

    @property
    def _client_params(self) -> Dict[str, Any]:
        """Get the parameters used for the client."""
        return self._default_params

    @property
    def _api_url(self) -> str:
        """GET chat completion api url."""
        app_type = "serviceapp" if self.service_app else "testapp"
        if self.task_id:
            return (
                f"{self.base_url}/{app_type}/v1/tasks/{self.task_id}/chat-completions"
            )
        else:
            return f"{self.base_url}/{app_type}/v1/chat-completions/{self.model_name}"

    @model_validator(mode="after")
    def validate_model_after(self) -> Self:
        """Resolve credentials / base URL from env and build HTTP clients."""
        if not (self.model_name or self.task_id):
            raise ValueError("either model_name or task_id must be assigned a value.")

        if not self.ncp_clovastudio_api_key:
            self.ncp_clovastudio_api_key = convert_to_secret_str(
                get_from_env("ncp_clovastudio_api_key", "NCP_CLOVASTUDIO_API_KEY")
            )

        if not self._is_new_api_key():
            self._init_fields_on_old_api_key()

        if not self.base_url:
            self.base_url = get_from_env(
                "base_url", "NCP_CLOVASTUDIO_API_BASE_URL", _DEFAULT_BASE_URL
            )

        if not self.client:
            self.client = httpx.Client(
                base_url=self.base_url,
                headers=self.default_headers(),
                timeout=self.timeout,
            )

        if not self.async_client:
            self.async_client = httpx.AsyncClient(
                base_url=self.base_url,
                headers=self.default_headers(),
                timeout=self.timeout,
            )

        return self

    def _is_new_api_key(self) -> bool:
        """New-style studio API keys start with the "nv-" prefix."""
        if self.ncp_clovastudio_api_key:
            return self.ncp_clovastudio_api_key.get_secret_value().startswith("nv-")
        else:
            return False

    def _init_fields_on_old_api_key(self) -> None:
        """Old-style keys additionally require the API-gateway key from env."""
        if not self.ncp_apigw_api_key:
            self.ncp_apigw_api_key = convert_to_secret_str(
                get_from_env("ncp_apigw_api_key", "NCP_APIGW_API_KEY", "")
            )

    def default_headers(self) -> Dict[str, Any]:
        """Build the request headers for the configured API-key scheme."""
        headers = {
            "Content-Type": "application/json",
            "Accept": "application/json",
        }
        clovastudio_api_key = (
            self.ncp_clovastudio_api_key.get_secret_value()
            if self.ncp_clovastudio_api_key
            else None
        )
        if self._is_new_api_key():
            ### headers on new api key
            headers["Authorization"] = f"Bearer {clovastudio_api_key}"
        else:
            ### headers on old api key
            if clovastudio_api_key:
                headers["X-NCP-CLOVASTUDIO-API-KEY"] = clovastudio_api_key
            apigw_api_key = (
                self.ncp_apigw_api_key.get_secret_value()
                if self.ncp_apigw_api_key
                else None
            )
            if apigw_api_key:
                headers["X-NCP-APIGW-API-KEY"] = apigw_api_key
        return headers

    def _create_message_dicts(
        self, messages: List[BaseMessage], stop: Optional[List[str]]
    ) -> Tuple[List[Dict], Dict[str, Any]]:
        """Convert messages and merge a call-time stop list into the params."""
        params = self._client_params
        if stop is not None:
            # FIX: the condition used to be `stop is not None and "stopBefore"
            # in params`, so a per-call `stop` was silently ignored unless
            # `stop_before` had already been set at construction (the default
            # params drop None values). A call-time stop list is now always
            # honored; when both are set it still overrides, as before.
            params["stopBefore"] = stop
        message_dicts = [_convert_message_to_naver_chat_message(m) for m in messages]
        return message_dicts, params

    def _completion_with_retry(
        self,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Any:
        """Use tenacity to retry the completion call.

        FIX: the old signature was `(self, **kwargs)`, so the `run_manager`
        passed by `_stream` landed in `kwargs` and was serialized into the
        JSON request body (not JSON-serializable). Capturing it explicitly —
        mirroring `_acompletion_with_retry` — keeps it out of the payload and
        lets the sync path use the same retry decorator the async path already
        had (the method name and `max_retries` field indicate retries were
        intended here too).
        """
        from httpx_sse import (
            ServerSentEvent,
            connect_sse,
        )

        retry_decorator = _create_retry_decorator(self, run_manager=run_manager)

        @retry_decorator
        def _completion_with_retry(**kwargs: Any) -> Any:
            if "stream" not in kwargs:
                kwargs["stream"] = False
            stream = kwargs["stream"]
            client = cast(httpx.Client, self.client)
            if stream:

                def iter_sse() -> Iterator[ServerSentEvent]:
                    with connect_sse(
                        client, "POST", self._api_url, json=kwargs
                    ) as event_source:
                        _raise_on_error(event_source.response)
                        for sse in event_source.iter_sse():
                            event_data = sse.json()
                            if (
                                sse.event == "signal"
                                and event_data.get("data", {}) == "[DONE]"
                            ):
                                return
                            if sse.event == "error":
                                raise SSEError(message=sse.data)
                            yield sse

                return iter_sse()
            else:
                response = client.post(url=self._api_url, json=kwargs)
                _raise_on_error(response)
                return response.json()

        return _completion_with_retry(**kwargs)

    async def _acompletion_with_retry(
        self,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Any:
        """Use tenacity to retry the async completion call."""
        from httpx_sse import aconnect_sse

        retry_decorator = _create_retry_decorator(self, run_manager=run_manager)

        @retry_decorator
        async def _completion_with_retry(**kwargs: Any) -> Any:
            if "stream" not in kwargs:
                kwargs["stream"] = False
            stream = kwargs["stream"]
            async_client = cast(httpx.AsyncClient, self.async_client)
            if stream:
                event_source = aconnect_sse(
                    async_client, "POST", self._api_url, json=kwargs
                )
                return _aiter_sse(event_source)
            else:
                response = await async_client.post(url=self._api_url, json=kwargs)
                await _araise_on_error(response)
                return response.json()

        return await _completion_with_retry(**kwargs)

    def _create_chat_result(self, response: Dict) -> ChatResult:
        """Build a ChatResult (message + usage/llm_output) from the response."""
        generations = []
        result = response.get("result", {})
        msg = result.get("message", {})
        message = _convert_naver_chat_message_to_message(msg)

        if isinstance(message, AIMessage):
            input_tokens = result.get("inputLength")
            output_tokens = result.get("outputLength")
            message.usage_metadata = {
                "input_tokens": input_tokens,
                "output_tokens": output_tokens,
                # FIX: guard the sum — when the API omits either length the
                # old `None + None` raised TypeError.
                "total_tokens": (input_tokens or 0) + (output_tokens or 0),
            }
        gen = ChatGeneration(
            message=message,
        )
        generations.append(gen)

        llm_output = {
            "stop_reason": result.get("stopReason"),
            "input_length": result.get("inputLength"),
            "output_length": result.get("outputLength"),
            "seed": result.get("seed"),
            "ai_filter": result.get("aiFilter"),
        }
        return ChatResult(generations=generations, llm_output=llm_output)

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs}
        response = self._completion_with_retry(messages=message_dicts, **params)
        return self._create_chat_result(response)

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs, "stream": True}

        default_chunk_class: Type[BaseMessageChunk] = AIMessageChunk
        for sse in self._completion_with_retry(
            messages=message_dicts, run_manager=run_manager, **params
        ):
            new_chunk = _convert_chunk_to_message_chunk(sse, default_chunk_class)
            default_chunk_class = new_chunk.__class__
            gen_chunk = ChatGenerationChunk(message=new_chunk)
            if run_manager:
                run_manager.on_llm_new_token(
                    token=cast(str, new_chunk.content), chunk=gen_chunk
                )
            yield gen_chunk

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs}
        response = await self._acompletion_with_retry(
            messages=message_dicts, run_manager=run_manager, **params
        )
        return self._create_chat_result(response)

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatGenerationChunk]:
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs, "stream": True}

        default_chunk_class: Type[BaseMessageChunk] = AIMessageChunk
        async for chunk in await self._acompletion_with_retry(
            messages=message_dicts, run_manager=run_manager, **params
        ):
            new_chunk = _convert_chunk_to_message_chunk(chunk, default_chunk_class)
            default_chunk_class = new_chunk.__class__
            gen_chunk = ChatGenerationChunk(message=new_chunk)
            if run_manager:
                await run_manager.on_llm_new_token(
                    token=cast(str, new_chunk.content), chunk=gen_chunk
                )
            yield gen_chunk
def _create_retry_decorator(
    llm: ChatClovaX,
    run_manager: Optional[
        Union[AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun]
    ] = None,
) -> Callable[[Any], Any]:
    """Returns a tenacity retry decorator, preconfigured to handle exceptions"""
    # Retry only on transport-level httpx failures, up to the model's
    # configured max_retries; callbacks are notified on each retry.
    retryable = [httpx.RequestError, httpx.StreamError]
    return create_base_retry_decorator(
        error_types=retryable,
        max_retries=llm.max_retries,
        run_manager=run_manager,
    )