def load(prompt_path: str, configuration: str = "default") -> Prompty:
    """Load a prompty file and return a Prompty object.

    Args:
        prompt_path: The path to the prompty file. Relative paths are
            resolved against the *caller's* file location, not the CWD.
        configuration: The configuration to use. Defaults to "default".
            (Currently unused in this function body — TODO confirm whether
            it should be threaded into normalization.)

    Returns:
        The Prompty object.

    Raises:
        ValueError: If the model, template, inputs, or outputs sections
            of the frontmatter cannot be parsed into their settings types.
    """
    file_path = Path(prompt_path)
    if not file_path.is_absolute():
        # get caller's path (take into account trace frame):
        # [-1] is this frame, [-2] the tracing wrapper, [-3] the real caller
        caller = Path(traceback.extract_stack()[-3].filename)
        file_path = Path(caller.parent / file_path).resolve().absolute()

    # load dictionary from prompty file (frontmatter attributes + body text)
    matter = Frontmatter.read_file(file_path.__fspath__())
    attributes = matter["attributes"]
    content = matter["body"]

    # normalize attribute dictionary resolve keys and files
    attributes = Prompty.normalize(attributes, file_path.parent)

    # ensure a model section exists so ModelSettings always gets a dict
    if "model" not in attributes:
        attributes["model"] = {}

    # pull model settings out of attributes; chain the cause so the
    # original traceback is preserved for debugging
    try:
        model = ModelSettings(**attributes.pop("model"))
    except Exception as e:
        raise ValueError(f"Error in model settings: {e}") from e

    # pull template settings: either a full dict of settings or a bare
    # string naming the template type (parser defaults to "prompty")
    try:
        if "template" in attributes:
            t = attributes.pop("template")
            if isinstance(t, dict):
                template = TemplateSettings(**t)
            else:
                # has to be a string denoting the type
                template = TemplateSettings(type=t, parser="prompty")
        else:
            template = TemplateSettings(type="mustache", parser="prompty")
    except Exception as e:
        raise ValueError(f"Error in template loader: {e}") from e

    # formalize inputs and outputs into PropertySettings maps
    if "inputs" in attributes:
        try:
            inputs = {k: PropertySettings(**v) for (k, v) in attributes.pop("inputs").items()}
        except Exception as e:
            raise ValueError(f"Error in inputs: {e}") from e
    else:
        inputs = {}

    if "outputs" in attributes:
        try:
            outputs = {k: PropertySettings(**v) for (k, v) in attributes.pop("outputs").items()}
        except Exception as e:
            raise ValueError(f"Error in outputs: {e}") from e
    else:
        outputs = {}

    # recursive loading of base prompty
    if "base" in attributes:
        # load the base prompty from the same directory as the current prompty
        base = load(file_path.parent / attributes["base"])
        # hoist the base prompty's attributes to the current prompty:
        # current values win; base fills in anything missing
        model.api = base.model.api if model.api == "" else model.api
        model.configuration = param_hoisting(model.configuration, base.model.configuration)
        model.parameters = param_hoisting(model.parameters, base.model.parameters)
        model.response = param_hoisting(model.response, base.model.response)
        attributes["sample"] = param_hoisting(attributes, base.sample, "sample")

        p = Prompty(
            **attributes,
            model=model,
            inputs=inputs,
            outputs=outputs,
            template=template,
            content=content,
            file=file_path,
            basePrompty=base,
        )
    else:
        p = Prompty(
            **attributes,
            model=model,
            inputs=inputs,
            outputs=outputs,
            template=template,
            content=content,
            file=file_path,
        )

    return p
def prepare(
    prompt: Prompty,
    inputs: Union[Dict[str, Any], None] = None,
) -> Any:
    """Prepare the inputs for the prompty: render the template, then parse it.

    Args:
        prompt: The Prompty object.
        inputs: The inputs to the prompty. Defaults to None (treated as {}).
            (Changed from a mutable `{}` default, which is shared across
            calls and mutated below by param_hoisting.)

    Returns:
        The prepared content — the parsed render, unwrapped from SimpleModel
        when the invoker returns one.
    """
    # avoid the mutable-default-argument pitfall
    inputs = {} if inputs is None else inputs

    invoker = InvokerFactory()
    # sample values from the prompty act as fallbacks for missing inputs
    inputs = param_hoisting(inputs, prompt.sample)

    if prompt.template.type == "NOOP":
        # no rendering: the raw prompty body is the render
        render = prompt.content
    else:
        # render the template with the hoisted inputs
        rendered = invoker(
            "renderer",
            prompt.template.type,
            prompt,
            SimpleModel(item=inputs),
        )
        render = rendered.item

    if prompt.template.parser == "NOOP":
        result = render
    else:
        # parse the rendered text for the target model API.
        # BUG FIX: previously passed SimpleModel(item=result.item), which
        # raised NameError when template.type == "NOOP" (result was unbound)
        # and otherwise just re-read the render — pass the render directly.
        result = invoker(
            "parser",
            f"{prompt.template.parser}.{prompt.model.api}",
            prompt,
            SimpleModel(item=render),
        )

    if isinstance(result, SimpleModel):
        return result.item
    else:
        return result
def run(
    prompt: Prompty,
    content: Union[Dict, List, str],
    configuration: Union[Dict[str, Any], None] = None,
    parameters: Union[Dict[str, Any], None] = None,
    raw: bool = False,
) -> Any:
    """Run the prompty against its configured model.

    Args:
        prompt: The Prompty object.
        content: The content to run the prompty on.
        configuration: Configuration overrides, hoisted over the prompty's
            own model configuration. Defaults to None (no overrides).
            (Changed from a mutable `{}` default shared across calls.)
        parameters: Parameter overrides, hoisted over the prompty's own
            model parameters. Defaults to None (no overrides).
        raw: Whether to return the raw (unprocessed) executor output.
            Defaults to False.

    Returns:
        The result of running the prompty, unwrapped from SimpleModel
        when the invoker returns one.
    """
    invoker = InvokerFactory()

    # apply caller overrides on top of the prompty's stored model settings;
    # an empty/None override leaves the stored settings untouched
    if configuration:
        prompt.model.configuration = param_hoisting(configuration, prompt.model.configuration)
    if parameters:
        prompt.model.parameters = param_hoisting(parameters, prompt.model.parameters)

    # execute against the model named by the configuration "type"
    result = invoker(
        "executor",
        prompt.model.configuration["type"],
        prompt,
        SimpleModel(item=content),
    )

    # skip post-processing when the caller wants the raw executor result
    if not raw:
        result = invoker(
            "processor",
            prompt.model.configuration["type"],
            prompt,
            result,
        )

    if isinstance(result, SimpleModel):
        return result.item
    else:
        return result
def execute(
    prompt: Union[str, Prompty],
    configuration: Union[Dict[str, Any], None] = None,
    parameters: Union[Dict[str, Any], None] = None,
    inputs: Union[Dict[str, Any], None] = None,
    raw: bool = False,
    connection: str = "default",
) -> Any:
    """Execute a prompty end to end: load (if needed), prepare, then run.

    Args:
        prompt: The prompt to execute. Can be a path to a prompty file or
            a Prompty object.
        configuration: Configuration overrides passed to ``run``.
            Defaults to None (no overrides; changed from a shared mutable
            `{}` default).
        parameters: Parameter overrides passed to ``run``. Defaults to None.
        inputs: The inputs to the prompty. Defaults to None (treated as {}).
        raw: Whether to return the raw output. Defaults to False.
        connection: The connection to use when loading from a path.
            Defaults to "default".

    Returns:
        The result of executing the prompty.
    """
    # normalize mutable-default arguments
    configuration = {} if configuration is None else configuration
    parameters = {} if parameters is None else parameters
    inputs = {} if inputs is None else inputs

    # a string prompt is a path: load it with the requested connection
    if isinstance(prompt, str):
        prompt = load(prompt, connection)

    # prepare content (render + parse), then run the LLM model
    content = prepare(prompt, inputs)
    result = run(prompt, content, configuration, parameters, raw)

    return result