def load_prompt_from_config(config: dict) -> BasePromptTemplate:
    """Load a prompt from a config dict.

    Args:
        config: Dict containing the prompt configuration.

    Returns:
        A PromptTemplate object.

    Raises:
        ValueError: If the prompt type is not supported.
    """
    if "_type" not in config:
        logger.warning("No `_type` key found, defaulting to `prompt`.")
    config_type = config.pop("_type", "prompt")

    if config_type not in type_to_loader_dict:
        raise ValueError(f"Loading {config_type} prompt not supported")

    prompt_loader = type_to_loader_dict[config_type]
    return prompt_loader(config)
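# Hypothetical usage sketch (not part of the module): a minimal "prompt"-type
# config dict is dispatched through type_to_loader_dict to _load_prompt.
#
#     config = {
#         "_type": "prompt",
#         "input_variables": ["subject"],
#         "template": "Tell me a joke about {subject}.",
#     }
#     prompt = load_prompt_from_config(config)
#     prompt.format(subject="cats")  # -> "Tell me a joke about cats."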
def _load_template(var_name: str, config: dict) -> dict:
    """Load template from the path if applicable."""
    # Check if template_path exists in config.
    if f"{var_name}_path" in config:
        # If it does, make sure the template variable doesn't also exist.
        if var_name in config:
            raise ValueError(
                f"Both `{var_name}_path` and `{var_name}` cannot be provided."
            )
        # Pop the template path from the config.
        template_path = Path(config.pop(f"{var_name}_path"))
        # Load the template.
        if template_path.suffix == ".txt":
            with open(template_path) as f:
                template = f.read()
        else:
            raise ValueError(
                f"Unsupported template file suffix `{template_path.suffix}`; "
                "only `.txt` files are supported."
            )
        # Set the template variable to the loaded template.
        config[var_name] = template
    return config


def _load_examples(config: dict) -> dict:
    """Load examples if necessary."""
    if isinstance(config["examples"], list):
        pass
    elif isinstance(config["examples"], str):
        with open(config["examples"]) as f:
            if config["examples"].endswith(".json"):
                examples = json.load(f)
            elif config["examples"].endswith((".yaml", ".yml")):
                examples = yaml.safe_load(f)
            else:
                raise ValueError(
                    "Invalid file format. Only json or yaml formats are supported."
                )
        config["examples"] = examples
    else:
        raise ValueError("Invalid examples format. Only list or string are supported.")
    return config


def _load_output_parser(config: dict) -> dict:
    """Load output parser."""
    if "output_parser" in config and config["output_parser"]:
        _config = config.pop("output_parser")
        output_parser_type = _config.pop("_type")
        if output_parser_type == "default":
            output_parser = StrOutputParser(**_config)
        else:
            raise ValueError(f"Unsupported output parser {output_parser_type}")
        config["output_parser"] = output_parser
    return config


def _load_few_shot_prompt(config: dict) -> FewShotPromptTemplate:
    """Load the "few shot" prompt from the config."""
    # Load the suffix and prefix templates.
    config = _load_template("suffix", config)
    config = _load_template("prefix", config)
    # Load the example prompt.
    if "example_prompt_path" in config:
        if "example_prompt" in config:
            raise ValueError(
                "Only one of example_prompt and example_prompt_path should "
                "be specified."
            )
        config["example_prompt"] = load_prompt(config.pop("example_prompt_path"))
    else:
        config["example_prompt"] = load_prompt_from_config(config["example_prompt"])
    # Load the examples.
    config = _load_examples(config)
    config = _load_output_parser(config)
    return FewShotPromptTemplate(**config)


def _load_prompt(config: dict) -> PromptTemplate:
    """Load the prompt template from config."""
    # Load the template from disk if necessary.
    config = _load_template("template", config)
    config = _load_output_parser(config)
    template_format = config.get("template_format", "f-string")
    if template_format == "jinja2":
        # Disabled due to:
        # https://github.com/langchain-ai/langchain/issues/4394
        raise ValueError(
            f"Loading templates with '{template_format}' format is no longer supported "
            "since it can lead to arbitrary code execution. Please migrate to using "
            "the 'f-string' template format, which does not suffer from this issue."
        )

    return PromptTemplate(**config)
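# Hypothetical few-shot config sketch: "suffix_path" is resolved by
# _load_template into a "suffix" string (the file name below is made up),
# while the inline "examples" list is passed through _load_examples unchanged.
#
#     config = {
#         "_type": "few_shot",
#         "input_variables": ["adjective"],
#         "prefix": "Write antonyms for the following words.",
#         "example_prompt": {
#             "_type": "prompt",
#             "input_variables": ["input", "output"],
#             "template": "Input: {input}\nOutput: {output}",
#         },
#         "examples": [{"input": "happy", "output": "sad"}],
#         "suffix_path": "antonym_suffix.txt",  # e.g. "Input: {adjective}\nOutput:"
#     }
#     few_shot_prompt = load_prompt_from_config(config)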
def load_prompt(
    path: Union[str, Path], encoding: Optional[str] = None
) -> BasePromptTemplate:
    """Unified method for loading a prompt from LangChain Hub or the local filesystem.

    Args:
        path: Path to the prompt file.
        encoding: Encoding of the file. Defaults to None.

    Returns:
        A PromptTemplate object.

    Raises:
        RuntimeError: If the path is a LangChain Hub path.
    """
    if isinstance(path, str) and path.startswith("lc://"):
        raise RuntimeError(
            "Loading from the deprecated github-based Hub is no longer supported. "
            "Please use the new LangChain Hub at https://smith.langchain.com/hub "
            "instead."
        )
    return _load_prompt_from_file(path, encoding)
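# Hypothetical usage sketch: loading a prompt from a local YAML file
# (the file name and contents below are illustrative only).
#
#     # prompt.yaml:
#     #     _type: prompt
#     #     input_variables: ["subject"]
#     #     template: "Tell me a joke about {subject}."
#
#     prompt = load_prompt("prompt.yaml", encoding="utf-8")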
def _load_prompt_from_file(
    file: Union[str, Path], encoding: Optional[str] = None
) -> BasePromptTemplate:
    """Load prompt from file."""
    # Convert file to a Path object.
    if isinstance(file, str):
        file_path = Path(file)
    else:
        file_path = file
    # Load from either json or yaml.
    if file_path.suffix == ".json":
        with open(file_path, encoding=encoding) as f:
            config = json.load(f)
    elif file_path.suffix.endswith((".yaml", ".yml")):
        with open(file_path, encoding=encoding) as f:
            config = yaml.safe_load(f)
    else:
        raise ValueError(f"Got unsupported file type {file_path.suffix}")
    # Load the prompt from the config now.
    return load_prompt_from_config(config)


def _load_chat_prompt(config: Dict) -> ChatPromptTemplate:
    """Load chat prompt from config."""
    # Pull the template string out of the first message; input variables are
    # inferred by ChatPromptTemplate.from_template, so drop them from the config.
    messages = config.pop("messages")
    template = messages[0]["prompt"].pop("template") if messages else None
    config.pop("input_variables")

    if not template:
        raise ValueError("Can't load chat prompt without template")

    return ChatPromptTemplate.from_template(template=template, **config)


type_to_loader_dict: Dict[str, Callable[[dict], BasePromptTemplate]] = {
    "prompt": _load_prompt,
    "few_shot": _load_few_shot_prompt,
    "chat": _load_chat_prompt,
}
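# Dispatch sketch (hypothetical usage): load_prompt_from_config boils down to
# looking the popped "_type" up in this registry and calling the matching loader.
#
#     loader = type_to_loader_dict["few_shot"]  # -> _load_few_shot_prompt
#     prompt = loader(config)                   # config with "_type" already popped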