async def aenumerate(
    iterable: AsyncIterator[Any], start: int = 0
) -> AsyncIterator[tuple[int, Any]]:
    """Async version of enumerate function."""
    i = start
    async for x in iterable:
        yield i, x
        i += 1
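A minimal usage sketch; letters is a hypothetical async generator used only for this example:

import asyncio
from typing import AsyncIterator


async def letters() -> AsyncIterator[str]:
    # Hypothetical async generator used only for illustration.
    for ch in "abc":
        yield ch


async def main() -> None:
    # Prints "1 a", "2 b", "3 c".
    async for i, ch in aenumerate(letters(), start=1):
        print(i, ch)


asyncio.run(main())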
class IndexableBaseModel(BaseModel):
    """Allows a BaseModel to return its fields by string variable indexing."""

    def __getitem__(self, item: str) -> Any:
        return getattr(self, item)
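For example, a subclass can be indexed by field name as well as accessed by attribute; Choice is a hypothetical model used only for illustration:

class Choice(IndexableBaseModel):
    message: dict


choice = Choice(message={"role": "assistant", "content": "Hi"})
assert choice["message"] == choice.message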
def convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage:
    """Convert a dictionary to a LangChain message.

    Args:
        _dict: The dictionary.

    Returns:
        The LangChain message.
    """
    role = _dict.get("role")
    if role == "user":
        return HumanMessage(content=_dict.get("content", ""))
    elif role == "assistant":
        # Fix for azure
        # Also OpenAI returns None for tool invocations
        content = _dict.get("content", "") or ""
        additional_kwargs: Dict = {}
        if function_call := _dict.get("function_call"):
            additional_kwargs["function_call"] = dict(function_call)
        if tool_calls := _dict.get("tool_calls"):
            additional_kwargs["tool_calls"] = tool_calls
        return AIMessage(content=content, additional_kwargs=additional_kwargs)
    elif role == "system":
        return SystemMessage(content=_dict.get("content", ""))
    elif role == "function":
        return FunctionMessage(
            content=_dict.get("content", ""), name=_dict.get("name")  # type: ignore[arg-type]
        )
    elif role == "tool":
        additional_kwargs = {}
        if "name" in _dict:
            additional_kwargs["name"] = _dict["name"]
        return ToolMessage(
            content=_dict.get("content", ""),
            tool_call_id=_dict.get("tool_call_id"),  # type: ignore[arg-type]
            additional_kwargs=additional_kwargs,
        )
    else:
        return ChatMessage(content=_dict.get("content", ""), role=role)  # type: ignore[arg-type]
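A brief usage sketch, assuming the message classes are imported from langchain_core.messages:

from langchain_core.messages import AIMessage, HumanMessage

msg = convert_dict_to_message({"role": "user", "content": "Hello!"})
assert isinstance(msg, HumanMessage)

# OpenAI returns content=None for pure tool/function invocations;
# the conversion normalizes that to an empty string.
ai = convert_dict_to_message({"role": "assistant", "content": None})
assert isinstance(ai, AIMessage) and ai.content == ""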
def convert_message_to_dict(message: BaseMessage) -> dict:
    """Convert a LangChain message to a dictionary.

    Args:
        message: The LangChain message.

    Returns:
        The dictionary.
    """
    message_dict: Dict[str, Any]
    if isinstance(message, ChatMessage):
        message_dict = {"role": message.role, "content": message.content}
    elif isinstance(message, HumanMessage):
        message_dict = {"role": "user", "content": message.content}
    elif isinstance(message, AIMessage):
        message_dict = {"role": "assistant", "content": message.content}
        if "function_call" in message.additional_kwargs:
            message_dict["function_call"] = message.additional_kwargs["function_call"]
            # If function call only, content is None not empty string
            if message_dict["content"] == "":
                message_dict["content"] = None
        if "tool_calls" in message.additional_kwargs:
            message_dict["tool_calls"] = message.additional_kwargs["tool_calls"]
            # If tool calls only, content is None not empty string
            if message_dict["content"] == "":
                message_dict["content"] = None
    elif isinstance(message, SystemMessage):
        message_dict = {"role": "system", "content": message.content}
    elif isinstance(message, FunctionMessage):
        message_dict = {
            "role": "function",
            "content": message.content,
            "name": message.name,
        }
    elif isinstance(message, ToolMessage):
        message_dict = {
            "role": "tool",
            "content": message.content,
            "tool_call_id": message.tool_call_id,
        }
    else:
        raise TypeError(f"Got unknown type {message}")
    if "name" in message.additional_kwargs:
        message_dict["name"] = message.additional_kwargs["name"]
    return message_dict
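A sketch of the reverse direction; "get_weather" is a hypothetical function name used only for illustration:

from langchain_core.messages import AIMessage, HumanMessage

assert convert_message_to_dict(HumanMessage(content="Hi")) == {
    "role": "user",
    "content": "Hi",
}

# An assistant message carrying only a function call is serialized with
# content=None rather than an empty string.
ai = AIMessage(
    content="",
    additional_kwargs={"function_call": {"name": "get_weather", "arguments": "{}"}},
)
d = convert_message_to_dict(ai)
assert d["content"] is None
assert d["function_call"]["name"] == "get_weather"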
def convert_openai_messages(messages: Sequence[Dict[str, Any]]) -> List[BaseMessage]:
    """Convert dictionaries representing OpenAI messages to LangChain format.

    Args:
        messages: List of dictionaries representing OpenAI messages

    Returns:
        List of LangChain BaseMessage objects.
    """
    return [convert_dict_to_message(m) for m in messages]
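For example, converting a short OpenAI-style conversation in one call:

openai_messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]
lc_messages = convert_openai_messages(openai_messages)
# -> [SystemMessage(content='You are a helpful assistant.'),
#     HumanMessage(content='What is 2 + 2?')]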
def _convert_message_chunk(chunk: BaseMessageChunk, i: int) -> dict:
    _dict: Dict[str, Any] = {}
    if isinstance(chunk, AIMessageChunk):
        if i == 0:
            # Only shows up in the first chunk
            _dict["role"] = "assistant"
        if "function_call" in chunk.additional_kwargs:
            _dict["function_call"] = chunk.additional_kwargs["function_call"]
            # If the first chunk is a function call, the content is None,
            # not an empty string and not missing.
            if i == 0:
                _dict["content"] = None
        else:
            _dict["content"] = chunk.content
    else:
        raise ValueError(f"Got unexpected streaming chunk type: {type(chunk)}")
    # This only happens at the end of streams, and OpenAI returns it as an empty dict
    if _dict == {"content": ""}:
        _dict = {}
    return _dict


def _convert_message_chunk_to_delta(chunk: BaseMessageChunk, i: int) -> Dict[str, Any]:
    _dict = _convert_message_chunk(chunk, i)
    return {"choices": [{"delta": _dict}]}
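A sketch of how streamed AIMessageChunk objects map onto OpenAI-style deltas; these helpers are private, so this is illustrative only:

from langchain_core.messages import AIMessageChunk

first = _convert_message_chunk_to_delta(AIMessageChunk(content="Hel"), 0)
# -> {'choices': [{'delta': {'role': 'assistant', 'content': 'Hel'}}]}

later = _convert_message_chunk_to_delta(AIMessageChunk(content="lo"), 1)
# -> {'choices': [{'delta': {'content': 'lo'}}]}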
def _has_assistant_message(session: ChatSession) -> bool:
    """Check if chat session has an assistant message."""
    return any(isinstance(m, AIMessage) for m in session["messages"])
def convert_messages_for_finetuning(
    sessions: Iterable[ChatSession],
) -> List[List[dict]]:
    """Convert messages to a list of lists of dictionaries for fine-tuning.

    Args:
        sessions: The chat sessions.

    Returns:
        The list of lists of dictionaries.
    """
    return [
        [convert_message_to_dict(s) for s in session["messages"]]
        for session in sessions
        if _has_assistant_message(session)
    ]
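A sketch of preparing fine-tuning data; the ChatSession import path is assumed to be langchain_core.chat_sessions:

from langchain_core.chat_sessions import ChatSession
from langchain_core.messages import AIMessage, HumanMessage

sessions = [
    ChatSession(messages=[HumanMessage(content="Hi"), AIMessage(content="Hello!")]),
    # Dropped from the output: no assistant message.
    ChatSession(messages=[HumanMessage(content="Anyone there?")]),
]
training_data = convert_messages_for_finetuning(sessions)
# -> [[{'role': 'user', 'content': 'Hi'},
#      {'role': 'assistant', 'content': 'Hello!'}]]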