Source code for langchain_community.chat_models.oci_generative_ai
import json
import re
import uuid
from abc import ABC, abstractmethod
from typing import (
    Any,
    Callable,
    Dict,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    Type,
    Union,
)

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LanguageModelInput
from langchain_core.language_models.chat_models import (
    BaseChatModel,
    generate_from_stream,
)
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    ChatMessage,
    HumanMessage,
    SystemMessage,
    ToolCall,
    ToolMessage,
)
from langchain_core.messages.tool import ToolCallChunk
from langchain_core.output_parsers.base import OutputParserLike
from langchain_core.output_parsers.openai_tools import (
    JsonOutputKeyToolsParser,
    PydanticToolsParser,
)
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
from langchain_core.pydantic_v1 import BaseModel
from langchain_core.runnables import Runnable
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_function

from langchain_community.llms.oci_generative_ai import OCIGenAIBase
from langchain_community.llms.utils import enforce_stop_tokens

CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint"

JSON_TO_PYTHON_TYPES = {
    "string": "str",
    "number": "float",
    "boolean": "bool",
    "integer": "int",
    "array": "List",
    "object": "Dict",
}


def _remove_signature_from_tool_description(name: str, description: str) -> str:
    """
    Removes the `{name}{signature} - ` prefix and the Args: section from a tool
    description. The signature is usually present for tools created with the
    @tool decorator, whereas the Args: section may be present in function doc
    blocks.
    """
    description = re.sub(rf"^{name}\(.*?\) -(?:> \w+? -)? ", "", description)
    description = re.sub(r"(?s)(?:\n?\n\s*?)?Args:.*$", "", description)
    return description


def _format_oci_tool_calls(
    tool_calls: Optional[List[Any]] = None,
) -> List[Dict]:
    """
    Formats an OCI GenAI API response into the tool call format used in LangChain.
    """
    if not tool_calls:
        return []

    formatted_tool_calls = []
    for tool_call in tool_calls:
        formatted_tool_calls.append(
            {
                "id": uuid.uuid4().hex[:],
                "function": {
                    "name": tool_call.name,
                    "arguments": json.dumps(tool_call.parameters),
                },
                "type": "function",
            }
        )
    return formatted_tool_calls


def _convert_oci_tool_call_to_langchain(tool_call: Any) -> ToolCall:
    """Convert an OCI GenAI tool call into langchain_core.messages.ToolCall."""
    _id = uuid.uuid4().hex[:]
    return ToolCall(name=tool_call.name, args=tool_call.parameters, id=_id)

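# A minimal illustration (not part of the library) of the two helpers above,
# assuming a description produced by the @tool decorator:
#
#     >>> _remove_signature_from_tool_description(
#     ...     "add", "add(a: int, b: int) - Adds two numbers.\n\nArgs:\n    a: first"
#     ... )
#     'Adds two numbers.'
#
# _format_oci_tool_calls turns OCI tool-call objects (anything with .name and
# .parameters attributes) into the OpenAI-style dicts LangChain stores in
# additional_kwargs, e.g.:
#
#     {"id": "<random hex>", "type": "function",
#      "function": {"name": "add", "arguments": '{"a": 1, "b": 2}'}}
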
class CohereProvider(Provider):
    # NOTE: the Provider ABC and this class's remaining members (e.g.,
    # oci_chat_message, oci_tool_call, chat_response_to_text) are elided
    # from this listing; the class is instantiated in ChatOCIGenAI._provider_map.

    def chat_generation_info(self, response: Any) -> Dict[str, Any]:
        generation_info: Dict[str, Any] = {
            "documents": response.data.chat_response.documents,
            "citations": response.data.chat_response.citations,
            "search_queries": response.data.chat_response.search_queries,
            "is_search_required": response.data.chat_response.is_search_required,
            "finish_reason": response.data.chat_response.finish_reason,
        }
        if response.data.chat_response.tool_calls:
            # Only populate tool_calls when 1) they are present on the response
            # and 2) there is one or more calls.
            generation_info["tool_calls"] = _format_oci_tool_calls(
                response.data.chat_response.tool_calls
            )
        return generation_info

    def get_role(self, message: BaseMessage) -> str:
        if isinstance(message, HumanMessage):
            return "USER"
        elif isinstance(message, AIMessage):
            return "CHATBOT"
        elif isinstance(message, SystemMessage):
            return "SYSTEM"
        elif isinstance(message, ToolMessage):
            return "TOOL"
        else:
            raise ValueError(f"Got unknown type {message}")

    def messages_to_oci_params(
        self, messages: Sequence[ChatMessage], **kwargs: Any
    ) -> Dict[str, Any]:
        is_force_single_step = kwargs.get("is_force_single_step") or False
        oci_chat_history = []

        for msg in messages[:-1]:
            if self.get_role(msg) == "USER" or self.get_role(msg) == "SYSTEM":
                oci_chat_history.append(
                    self.oci_chat_message[self.get_role(msg)](message=msg.content)
                )
            elif isinstance(msg, AIMessage):
                if msg.tool_calls and is_force_single_step:
                    continue
                tool_calls = (
                    [
                        self.oci_tool_call(name=tc["name"], parameters=tc["args"])
                        for tc in msg.tool_calls
                    ]
                    if msg.tool_calls
                    else None
                )
                msg_content = msg.content if msg.content else " "
                oci_chat_history.append(
                    self.oci_chat_message[self.get_role(msg)](
                        message=msg_content, tool_calls=tool_calls
                    )
                )

        # Get the messages for the current chat turn.
        current_chat_turn_messages = []
        for message in messages[::-1]:
            current_chat_turn_messages.append(message)
            if isinstance(message, HumanMessage):
                break
        current_chat_turn_messages = current_chat_turn_messages[::-1]

        oci_tool_results: Union[List[Any], None] = []
        for message in current_chat_turn_messages:
            if isinstance(message, ToolMessage):
                tool_message = message
                previous_ai_msgs = [
                    message
                    for message in current_chat_turn_messages
                    if isinstance(message, AIMessage) and message.tool_calls
                ]
                if previous_ai_msgs:
                    previous_ai_msg = previous_ai_msgs[-1]
                    for lc_tool_call in previous_ai_msg.tool_calls:
                        if lc_tool_call["id"] == tool_message.tool_call_id:
                            tool_result = self.oci_tool_result()
                            tool_result.call = self.oci_tool_call(
                                name=lc_tool_call["name"],
                                parameters=lc_tool_call["args"],
                            )
                            tool_result.outputs = [{"output": tool_message.content}]
                            oci_tool_results.append(tool_result)

        if not oci_tool_results:
            oci_tool_results = None

        message_str = "" if oci_tool_results else messages[-1].content

        oci_params = {
            "message": message_str,
            "chat_history": oci_chat_history,
            "tool_results": oci_tool_results,
            "api_format": self.chat_api_format,
        }

        return {k: v for k, v in oci_params.items() if v is not None}

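    # Worked example (a sketch, not from the library): after a tool call, the
    # incoming message list is typically
    #
    #     [HumanMessage("What is 2+2?"),
    #      AIMessage("", tool_calls=[{"name": "add", "args": {...}, "id": "t1"}]),
    #      ToolMessage("4", tool_call_id="t1")]
    #
    # Everything but the last message becomes `chat_history`; the current turn
    # (walking back to the most recent HumanMessage) contains the ToolMessage,
    # which is matched to the AI tool call by id and emitted as `tool_results`;
    # and `message` is sent as "" because tool results are present.
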
    def convert_to_oci_tool(
        self,
        tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
    ) -> Dict[str, Any]:
        """
        Convert a BaseTool instance, JSON schema dict, or BaseModel type
        to an OCI tool.
        """
        if isinstance(tool, BaseTool):
            return self.oci_tool(
                name=tool.name,
                description=_remove_signature_from_tool_description(
                    tool.name, tool.description
                ),
                parameter_definitions={
                    p_name: self.oci_tool_param(
                        description=p_def.get("description")
                        if "description" in p_def
                        else "",
                        type=JSON_TO_PYTHON_TYPES.get(
                            p_def.get("type"), p_def.get("type")
                        ),
                        is_required="default" not in p_def,
                    )
                    for p_name, p_def in tool.args.items()
                },
            )
        elif isinstance(tool, dict):
            if not all(k in tool for k in ("title", "description", "properties")):
                raise ValueError(
                    "Unsupported dict type. Tool must be passed in as a BaseTool "
                    "instance, JSON schema dict, or BaseModel type."
                )
            return self.oci_tool(
                name=tool.get("title"),
                description=tool.get("description"),
                parameter_definitions={
                    p_name: self.oci_tool_param(
                        description=p_def.get("description"),
                        type=JSON_TO_PYTHON_TYPES.get(
                            p_def.get("type"), p_def.get("type")
                        ),
                        is_required="default" not in p_def,
                    )
                    for p_name, p_def in tool.get("properties", {}).items()
                },
            )
        elif (isinstance(tool, type) and issubclass(tool, BaseModel)) or callable(
            tool
        ):
            as_json_schema_function = convert_to_openai_function(tool)
            parameters = as_json_schema_function.get("parameters", {})
            properties = parameters.get("properties", {})
            return self.oci_tool(
                name=as_json_schema_function.get("name"),
                description=as_json_schema_function.get(
                    "description",
                    as_json_schema_function.get("name"),
                ),
                parameter_definitions={
                    p_name: self.oci_tool_param(
                        description=p_def.get("description"),
                        type=JSON_TO_PYTHON_TYPES.get(
                            p_def.get("type"), p_def.get("type")
                        ),
                        is_required=p_name in parameters.get("required", []),
                    )
                    for p_name, p_def in properties.items()
                },
            )
        else:
            raise ValueError(
                f"Unsupported tool type {type(tool)}. Tool must be passed in as "
                "a BaseTool instance, JSON schema dict, or BaseModel type."
            )

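    # For instance (a hedged sketch), a Pydantic schema such as
    #
    #     class GetWeather(BaseModel):
    #         """Get the current weather for a city."""
    #         city: str = Field(description="City name")
    #
    # goes through convert_to_openai_function and comes back as an oci_tool with
    # name "GetWeather", the docstring as its description, and a parameter
    # definition {"city": oci_tool_param(type="str", is_required=True, ...)};
    # JSON Schema types are mapped to Python type names via JSON_TO_PYTHON_TYPES.
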
class MetaProvider(Provider):
    # NOTE: this class's remaining members are elided from this listing; the
    # class is instantiated in ChatOCIGenAI._provider_map.

    def get_role(self, message: BaseMessage) -> str:
        # Meta only supports alternating user/assistant roles.
        if isinstance(message, HumanMessage):
            return "USER"
        elif isinstance(message, AIMessage):
            return "ASSISTANT"
        elif isinstance(message, SystemMessage):
            return "SYSTEM"
        else:
            raise ValueError(f"Got unknown type {message}")

    def convert_to_oci_tool(
        self,
        tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
    ) -> Dict[str, Any]:
        raise NotImplementedError("Tools not supported for Meta models")

class ChatOCIGenAI(BaseChatModel, OCIGenAIBase):
    """ChatOCIGenAI chat model integration.

    Setup:
        Install ``langchain-community`` and the ``oci`` sdk.

        .. code-block:: bash

            pip install -U langchain-community oci

    Key init args — completion params:
        model_id: str
            ID of the OCIGenAI chat model to use, e.g., cohere.command-r-16k.
        is_stream: bool
            Whether to stream back partial progress.
        model_kwargs: Optional[Dict]
            Keyword arguments to pass to the specific model used, e.g.,
            temperature, max_tokens.

    Key init args — client params:
        service_endpoint: str
            The endpoint URL for the OCIGenAI service, e.g.,
            https://inference.generativeai.us-chicago-1.oci.oraclecloud.com.
        compartment_id: str
            The compartment OCID.
        auth_type: str
            The authentication type to use, e.g., API_KEY (default),
            SECURITY_TOKEN, INSTANCE_PRINCIPAL, RESOURCE_PRINCIPAL.
        auth_profile: Optional[str]
            The name of the profile in ~/.oci/config; if not specified,
            DEFAULT will be used.
        provider: str
            Provider name of the model. Defaults to None; it is derived from
            the model_id when possible, otherwise user input is required.

    See the full list of supported init args and their descriptions in the
    params section.

    Instantiate:
        .. code-block:: python

            from langchain_community.chat_models import ChatOCIGenAI

            chat = ChatOCIGenAI(
                model_id="cohere.command-r-16k",
                service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
                compartment_id="MY_OCID",
                model_kwargs={"temperature": 0.7, "max_tokens": 500},
            )

    Invoke:
        .. code-block:: python

            messages = [
                SystemMessage(content="You are an AI assistant."),
                AIMessage(content="Hi there human!"),
                HumanMessage(content="Tell me a joke."),
            ]
            response = chat.invoke(messages)

    Stream:
        .. code-block:: python

            for r in chat.stream(messages):
                print(r.content, end="", flush=True)

    Response metadata:
        .. code-block:: python

            response = chat.invoke(messages)
            print(response.response_metadata)

    """  # noqa: E501

    class Config:
        extra = "forbid"

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "oci_generative_ai_chat"

    @property
    def _provider_map(self) -> Mapping[str, Any]:
        """Get the provider map."""
        return {
            "cohere": CohereProvider(),
            "meta": MetaProvider(),
        }

    @property
    def _provider(self) -> Any:
        """Get the internal provider object."""
        return self._get_provider(provider_map=self._provider_map)

    def _prepare_request(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]],
        stream: bool,
        **kwargs: Any,
    ) -> Dict[str, Any]:
        try:
            from oci.generative_ai_inference import models
        except ImportError as ex:
            raise ModuleNotFoundError(
                "Could not import oci python package. "
                "Please make sure you have the oci package installed."
            ) from ex
        oci_params = self._provider.messages_to_oci_params(messages, **kwargs)
        oci_params["is_stream"] = stream
        _model_kwargs = self.model_kwargs or {}

        if stop is not None:
            _model_kwargs[self._provider.stop_sequence_key] = stop

        chat_params = {**_model_kwargs, **kwargs, **oci_params}

        if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
            serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
        else:
            serving_mode = models.OnDemandServingMode(model_id=self.model_id)

        request = models.ChatDetails(
            compartment_id=self.compartment_id,
            serving_mode=serving_mode,
            chat_request=self._provider.oci_chat_request(**chat_params),
        )
        return request

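    # Usage sketch (assumes the Cohere provider, which supports tools; the
    # bind_tools override itself is not shown in this listing): tools bound via
    # bind_tools are converted with convert_to_oci_tool and sent with the chat
    # request prepared above.
    #
    #     llm_with_tools = chat.bind_tools([GetWeather])
    #     ai_msg = llm_with_tools.invoke("What is the weather in Rome?")
    #     ai_msg.tool_calls  # -> [{"name": "GetWeather", "args": {...}, "id": ...}]
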
    def with_structured_output(
        self,
        schema: Union[Dict[Any, Any], Type[BaseModel]],
        **kwargs: Any,
    ) -> Runnable[LanguageModelInput, Union[Dict, BaseModel]]:
        """Model wrapper that returns outputs formatted to match the given schema.

        Args:
            schema: The output schema as a dict or a Pydantic class. If a
                Pydantic class, then the model output will be an object of
                that class. If a dict, then the model output will be a dict.

        Returns:
            A Runnable that takes any ChatModel input and returns either a
            dict or a Pydantic class as output.
        """
        llm = self.bind_tools([schema], **kwargs)
        if isinstance(schema, type) and issubclass(schema, BaseModel):
            output_parser: OutputParserLike = PydanticToolsParser(
                tools=[schema], first_tool_only=True
            )
        else:
            key_name = getattr(self._provider.convert_to_oci_tool(schema), "name")
            output_parser = JsonOutputKeyToolsParser(
                key_name=key_name, first_tool_only=True
            )

        return llm | output_parser

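    # Usage sketch: structured output rides on tool calling, so it requires a
    # provider with tool support (Cohere here). `Joke` is an illustrative
    # placeholder schema.
    #
    #     class Joke(BaseModel):
    #         setup: str
    #         punchline: str
    #
    #     structured_llm = chat.with_structured_output(Joke)
    #     structured_llm.invoke("Tell me a joke")  # -> Joke(setup=..., punchline=...)
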
    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Call out to an OCIGenAI chat model.

        Args:
            messages: list of LangChain messages
            stop: Optional list of stop words to use.

        Returns:
            LangChain ChatResult

        Example:
            .. code-block:: python

                messages = [
                    HumanMessage(content="hello!"),
                    AIMessage(content="Hi there human!"),
                    HumanMessage(content="Meow!"),
                ]

                response = llm.invoke(messages)
        """
        if self.is_stream:
            stream_iter = self._stream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return generate_from_stream(stream_iter)

        request = self._prepare_request(messages, stop=stop, stream=False, **kwargs)
        response = self.client.chat(request)

        content = self._provider.chat_response_to_text(response)

        if stop is not None:
            content = enforce_stop_tokens(content, stop)

        generation_info = self._provider.chat_generation_info(response)

        llm_output = {
            "model_id": response.data.model_id,
            "model_version": response.data.model_version,
            "request_id": response.request_id,
            "content-length": response.headers["content-length"],
        }

        if "tool_calls" in generation_info:
            tool_calls = [
                _convert_oci_tool_call_to_langchain(tool_call)
                for tool_call in response.data.chat_response.tool_calls
            ]
        else:
            tool_calls = []

        message = AIMessage(
            content=content,
            additional_kwargs=generation_info,
            tool_calls=tool_calls,
        )

        return ChatResult(
            generations=[
                ChatGeneration(message=message, generation_info=generation_info)
            ],
            llm_output=llm_output,
        )

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        request = self._prepare_request(messages, stop=stop, stream=True, **kwargs)
        response = self.client.chat(request)

        for event in response.data.events():
            event_data = json.loads(event.data)
            if not self._provider.is_chat_stream_end(event_data):  # still streaming
                delta = self._provider.chat_stream_to_text(event_data)
                chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
                if run_manager:
                    run_manager.on_llm_new_token(delta, chunk=chunk)
                yield chunk
            else:  # stream end
                generation_info = self._provider.chat_stream_generation_info(
                    event_data
                )
                tool_call_chunks = []
                if tool_calls := generation_info.get("tool_calls"):
                    content = self._provider.chat_stream_to_text(event_data)
                    try:
                        tool_call_chunks = [
                            ToolCallChunk(
                                name=tool_call["function"].get("name"),
                                args=tool_call["function"].get("arguments"),
                                id=tool_call.get("id"),
                                index=tool_call.get("index"),
                            )
                            for tool_call in tool_calls
                        ]
                    except KeyError:
                        pass
                else:
                    content = ""
                message = AIMessageChunk(
                    content=content,
                    additional_kwargs=generation_info,
                    tool_call_chunks=tool_call_chunks,
                )
                yield ChatGenerationChunk(
                    message=message,
                    generation_info=generation_info,
                )
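
# End-to-end tool-calling sketch (illustrative; `llm_with_tools` and the tool
# answer are placeholders): the AIMessage returned by _generate carries
# tool_calls, the caller executes the tool, and the ToolMessage answer is
# folded back into `tool_results` by messages_to_oci_params on the next invoke.
#
#     ai_msg = llm_with_tools.invoke([HumanMessage("What is 2+2?")])
#     tool_msg = ToolMessage(content="4", tool_call_id=ai_msg.tool_calls[0]["id"])
#     final = llm_with_tools.invoke(
#         [HumanMessage("What is 2+2?"), ai_msg, tool_msg]
#     )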