# Source: langchain_community.chat_models.gigachat
from__future__importannotationsimportloggingfromtypingimport(TYPE_CHECKING,Any,AsyncIterator,Dict,Iterator,List,Mapping,Optional,Type,)fromlangchain_core.callbacksimport(AsyncCallbackManagerForLLMRun,CallbackManagerForLLMRun,)fromlangchain_core.language_models.chat_modelsimport(BaseChatModel,agenerate_from_stream,generate_from_stream,)fromlangchain_core.messagesimport(AIMessage,AIMessageChunk,BaseMessage,BaseMessageChunk,ChatMessage,ChatMessageChunk,FunctionMessage,FunctionMessageChunk,HumanMessage,HumanMessageChunk,SystemMessage,SystemMessageChunk,)fromlangchain_core.outputsimportChatGeneration,ChatGenerationChunk,ChatResultfromlangchain_community.llms.gigachatimport_BaseGigaChatifTYPE_CHECKING:importgigachat.modelsasgmlogger=logging.getLogger(__name__)def_convert_dict_to_message(message:gm.Messages)->BaseMessage:fromgigachat.modelsimportFunctionCall,MessagesRoleadditional_kwargs:Dict={}iffunction_call:=message.function_call:ifisinstance(function_call,FunctionCall):additional_kwargs["function_call"]=dict(function_call)elifisinstance(function_call,dict):additional_kwargs["function_call"]=function_callifmessage.role==MessagesRole.SYSTEM:returnSystemMessage(content=message.content)elifmessage.role==MessagesRole.USER:returnHumanMessage(content=message.content)elifmessage.role==MessagesRole.ASSISTANT:returnAIMessage(content=message.content,additional_kwargs=additional_kwargs)else:raiseTypeError(f"Got unknown role 
{message.role}{message}")def_convert_message_to_dict(message:gm.BaseMessage)->gm.Messages:fromgigachat.modelsimportMessages,MessagesRoleifisinstance(message,SystemMessage):returnMessages(role=MessagesRole.SYSTEM,content=message.content)elifisinstance(message,HumanMessage):returnMessages(role=MessagesRole.USER,content=message.content)elifisinstance(message,AIMessage):returnMessages(role=MessagesRole.ASSISTANT,content=message.content,function_call=message.additional_kwargs.get("function_call",None),)elifisinstance(message,ChatMessage):returnMessages(role=MessagesRole(message.role),content=message.content)elifisinstance(message,FunctionMessage):returnMessages(role=MessagesRole.FUNCTION,content=message.content)else:raiseTypeError(f"Got unknown type {message}")def_convert_delta_to_message_chunk(_dict:Mapping[str,Any],default_class:Type[BaseMessageChunk])->BaseMessageChunk:role=_dict.get("role")content=_dict.get("content")or""additional_kwargs:Dict={}if_dict.get("function_call"):function_call=dict(_dict["function_call"])if"name"infunction_callandfunction_call["name"]isNone:function_call["name"]=""additional_kwargs["function_call"]=function_callifrole=="user"ordefault_class==HumanMessageChunk:returnHumanMessageChunk(content=content)elifrole=="assistant"ordefault_class==AIMessageChunk:returnAIMessageChunk(content=content,additional_kwargs=additional_kwargs)elifrole=="system"ordefault_class==SystemMessageChunk:returnSystemMessageChunk(content=content)elifrole=="function"ordefault_class==FunctionMessageChunk:returnFunctionMessageChunk(content=content,name=_dict["name"])elifroleordefault_class==ChatMessageChunk:returnChatMessageChunk(content=content,role=role)# type: ignore[arg-type]else:returndefault_class(content=content)# type: ignore[call-arg]
class GigaChat(_BaseGigaChat, BaseChatModel):
    """`GigaChat` large language models API.

    To use, you should pass login and password to access GigaChat API or use token.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import GigaChat
            giga = GigaChat(credentials=..., scope=..., verify_ssl_certs=False)
    """

    def _build_payload(self, messages: List[BaseMessage], **kwargs: Any) -> gm.Chat:
        """Assemble a ``gigachat.models.Chat`` request from LangChain messages.

        Only generation options the user explicitly set (non-``None``) are
        copied onto the payload so the API can apply its own defaults for
        the rest.
        """
        from gigachat.models import Chat

        payload = Chat(
            messages=[_convert_message_to_dict(m) for m in messages],
        )
        payload.functions = kwargs.get("functions", None)
        payload.model = self.model
        if self.profanity_check is not None:
            payload.profanity_check = self.profanity_check
        if self.temperature is not None:
            payload.temperature = self.temperature
        if self.top_p is not None:
            payload.top_p = self.top_p
        if self.max_tokens is not None:
            payload.max_tokens = self.max_tokens
        if self.repetition_penalty is not None:
            payload.repetition_penalty = self.repetition_penalty
        if self.update_interval is not None:
            payload.update_interval = self.update_interval
        if self.verbose:
            logger.warning("Giga request: %s", payload.dict())
        return payload

    def _create_chat_result(self, response: Any) -> ChatResult:
        """Convert a GigaChat completion response into a ``ChatResult``.

        Logs a warning for any choice that stopped for a reason other than
        ``"stop"`` (e.g. length limit or content filtering).
        """
        generations = []
        for res in response.choices:
            message = _convert_dict_to_message(res.message)
            finish_reason = res.finish_reason
            gen = ChatGeneration(
                message=message,
                generation_info={"finish_reason": finish_reason},
            )
            generations.append(gen)
            if finish_reason != "stop":
                logger.warning(
                    "Giga generation stopped with reason: %s",
                    finish_reason,
                )
            if self.verbose:
                logger.warning("Giga response: %s", message.content)
        llm_output = {"token_usage": response.usage, "model_name": response.model}
        return ChatResult(generations=generations, llm_output=llm_output)

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        stream: Optional[bool] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Generate a chat completion, optionally via streaming.

        ``stream`` overrides the instance-level ``streaming`` flag when set.
        NOTE(review): ``stop`` is accepted for interface compatibility but is
        not forwarded to the GigaChat API — confirm the API has no stop-word
        support before relying on it.
        """
        should_stream = stream if stream is not None else self.streaming
        if should_stream:
            stream_iter = self._stream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return generate_from_stream(stream_iter)

        payload = self._build_payload(messages, **kwargs)
        response = self._client.chat(payload)
        return self._create_chat_result(response)

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        stream: Optional[bool] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Async counterpart of :meth:`_generate`."""
        should_stream = stream if stream is not None else self.streaming
        if should_stream:
            stream_iter = self._astream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return await agenerate_from_stream(stream_iter)

        payload = self._build_payload(messages, **kwargs)
        response = await self._client.achat(payload)
        return self._create_chat_result(response)

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        """Stream chat generation chunks from the GigaChat API."""
        payload = self._build_payload(messages, **kwargs)

        for chunk in self._client.stream(payload):
            if not isinstance(chunk, dict):
                chunk = chunk.dict()
            if len(chunk["choices"]) == 0:
                continue

            choice = chunk["choices"][0]
            # BUGFIX: default must be "" (the original used {}): content is a
            # string and is forwarded to on_llm_new_token below, which would
            # otherwise receive a dict for deltas with no "content" key.
            content = choice.get("delta", {}).get("content", "")
            message_chunk = _convert_delta_to_message_chunk(
                choice["delta"], AIMessageChunk
            )

            finish_reason = choice.get("finish_reason")
            generation_info = (
                dict(finish_reason=finish_reason)
                if finish_reason is not None
                else None
            )
            if run_manager:
                run_manager.on_llm_new_token(content)
            yield ChatGenerationChunk(
                message=message_chunk, generation_info=generation_info
            )

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatGenerationChunk]:
        """Async counterpart of :meth:`_stream`."""
        payload = self._build_payload(messages, **kwargs)

        async for chunk in self._client.astream(payload):
            if not isinstance(chunk, dict):
                chunk = chunk.dict()
            if len(chunk["choices"]) == 0:
                continue

            choice = chunk["choices"][0]
            # BUGFIX: default "" instead of {} — see _stream.
            content = choice.get("delta", {}).get("content", "")
            message_chunk = _convert_delta_to_message_chunk(
                choice["delta"], AIMessageChunk
            )

            finish_reason = choice.get("finish_reason")
            generation_info = (
                dict(finish_reason=finish_reason)
                if finish_reason is not None
                else None
            )
            if run_manager:
                await run_manager.on_llm_new_token(content)
            yield ChatGenerationChunk(
                message=message_chunk, generation_info=generation_info
            )