class BaseMessagePromptTemplate(Serializable, ABC):
    """Base class for message prompt templates."""

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether or not the class is serializable.

        Returns: True
        """
        return True

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "chat"]

    @abstractmethod
    def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Format messages from kwargs. Should return a list of BaseMessages.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """

    async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Async format messages from kwargs. Should return a list of BaseMessages.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """
        return self.format_messages(**kwargs)

    @property
    @abstractmethod
    def input_variables(self) -> List[str]:
        """Input variables for this prompt template.

        Returns:
            List of input variables.
        """

    def pretty_repr(self, html: bool = False) -> str:
        """Human-readable representation.

        Args:
            html: Whether to format as HTML. Defaults to False.

        Returns:
            Human-readable representation.
        """
        raise NotImplementedError

    def pretty_print(self) -> None:
        """Print a human-readable representation."""
        print(self.pretty_repr(html=is_interactive_env()))  # noqa: T201

    def __add__(self, other: Any) -> ChatPromptTemplate:
        """Combine two prompt templates.

        Args:
            other: Another prompt template.

        Returns:
            Combined prompt template.
        """
        prompt = ChatPromptTemplate(messages=[self])  # type: ignore[call-arg]
        return prompt + other
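
# Illustrative usage sketch (not part of the original module): ``__add__`` above wraps
# the left operand in a ChatPromptTemplate and then delegates to
# ``ChatPromptTemplate.__add__``, so message templates can be chained with ``+``. The
# function name below is hypothetical; the classes it uses are defined later in this
# module and re-exported from ``langchain_core.prompts``.
def _example_combine_with_add() -> None:
    from langchain_core.prompts import (
        ChatPromptTemplate,
        HumanMessagePromptTemplate,
        SystemMessagePromptTemplate,
    )

    system = SystemMessagePromptTemplate.from_template(
        "You are a {adjective} assistant."
    )
    human = HumanMessagePromptTemplate.from_template("{user_input}")
    combined = system + human  # ChatPromptTemplate containing both message templates
    assert isinstance(combined, ChatPromptTemplate)
    combined.format_messages(adjective="helpful", user_input="Hello!")
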
class MessagesPlaceholder(BaseMessagePromptTemplate):
    """Prompt template that assumes variable is already list of messages.

    A placeholder which can be used to pass in a list of messages.

    Direct usage:

        .. code-block:: python

            from langchain_core.prompts import MessagesPlaceholder

            prompt = MessagesPlaceholder("history")
            prompt.format_messages() # raises KeyError

            prompt = MessagesPlaceholder("history", optional=True)
            prompt.format_messages() # returns empty list []

            prompt.format_messages(
                history=[
                    ("system", "You are an AI assistant."),
                    ("human", "Hello!"),
                ]
            )
            # -> [
            #     SystemMessage(content="You are an AI assistant."),
            #     HumanMessage(content="Hello!"),
            # ]

    Building a prompt with chat history:

        .. code-block:: python

            from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

            prompt = ChatPromptTemplate.from_messages(
                [
                    ("system", "You are a helpful assistant."),
                    MessagesPlaceholder("history"),
                    ("human", "{question}")
                ]
            )
            prompt.invoke(
                {
                    "history": [("human", "what's 5 + 2"), ("ai", "5 + 2 is 7")],
                    "question": "now multiply that by 4"
                }
            )
            # -> ChatPromptValue(messages=[
            #     SystemMessage(content="You are a helpful assistant."),
            #     HumanMessage(content="what's 5 + 2"),
            #     AIMessage(content="5 + 2 is 7"),
            #     HumanMessage(content="now multiply that by 4"),
            # ])

    Limiting the number of messages:

        .. code-block:: python

            from langchain_core.prompts import MessagesPlaceholder

            prompt = MessagesPlaceholder("history", n_messages=1)

            prompt.format_messages(
                history=[
                    ("system", "You are an AI assistant."),
                    ("human", "Hello!"),
                ]
            )
            # -> [
            #     HumanMessage(content="Hello!"),
            # ]
    """

    variable_name: str
    """Name of variable to use as messages."""

    optional: bool = False
    """If True format_messages can be called with no arguments and will return an
    empty list. If False then a named argument with name `variable_name` must be
    passed in, even if the value is an empty list."""

    n_messages: Optional[PositiveInt] = None
    """Maximum number of messages to include. If None, then will include all.
    Defaults to None."""

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "chat"]

    def __init__(self, variable_name: str, *, optional: bool = False, **kwargs: Any):
        super().__init__(variable_name=variable_name, optional=optional, **kwargs)

    def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Format messages from kwargs.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessage.

        Raises:
            ValueError: If variable is not a list of messages.
        """
        value = (
            kwargs.get(self.variable_name, [])
            if self.optional
            else kwargs[self.variable_name]
        )
        if not isinstance(value, list):
            raise ValueError(
                f"variable {self.variable_name} should be a list of base messages, "
                f"got {value} of type {type(value)}"
            )
        value = convert_to_messages(value)
        if self.n_messages:
            value = value[-self.n_messages :]
        return value

    @property
    def input_variables(self) -> List[str]:
        """Input variables for this prompt template.

        Returns:
            List of input variable names.
        """
        return [self.variable_name] if not self.optional else []

    def pretty_repr(self, html: bool = False) -> str:
        """Human-readable representation.

        Args:
            html: Whether to format as HTML. Defaults to False.

        Returns:
            Human-readable representation.
        """
        var = "{" + self.variable_name + "}"
        if html:
            title = get_msg_title_repr("Messages Placeholder", bold=True)
            var = get_colored_text(var, "yellow")
        else:
            title = get_msg_title_repr("Messages Placeholder")
        return f"{title}\n\n{var}"

MessagePromptTemplateT = TypeVar(
    "MessagePromptTemplateT", bound="BaseStringMessagePromptTemplate"
)
"""Type variable for message prompt templates."""

class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
    """Base class for message prompt templates that use a string prompt template."""

    prompt: StringPromptTemplate
    """String prompt template."""

    additional_kwargs: dict = Field(default_factory=dict)
    """Additional keyword arguments to pass to the prompt template."""

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "chat"]

    @classmethod
    def from_template(
        cls: Type[MessagePromptTemplateT],
        template: str,
        template_format: str = "f-string",
        partial_variables: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> MessagePromptTemplateT:
        """Create a class from a string template.

        Args:
            template: a template.
            template_format: format of the template. Defaults to "f-string".
            partial_variables: A dictionary of variables that can be used to partially
                fill in the template. For example, if the template is
                `"{variable1} {variable2}"`, and `partial_variables` is
                `{"variable1": "foo"}`, then the final prompt will be
                `"foo {variable2}"`. Defaults to None.
            **kwargs: keyword arguments to pass to the constructor.

        Returns:
            A new instance of this class.
        """
        prompt = PromptTemplate.from_template(
            template,
            template_format=template_format,
            partial_variables=partial_variables,
        )
        return cls(prompt=prompt, **kwargs)

    @classmethod
    def from_template_file(
        cls: Type[MessagePromptTemplateT],
        template_file: Union[str, Path],
        input_variables: List[str],
        **kwargs: Any,
    ) -> MessagePromptTemplateT:
        """Create a class from a template file.

        Args:
            template_file: path to a template file. String or Path.
            input_variables: list of input variables.
            **kwargs: keyword arguments to pass to the constructor.

        Returns:
            A new instance of this class.
        """
        prompt = PromptTemplate.from_file(template_file, input_variables)
        return cls(prompt=prompt, **kwargs)

    @abstractmethod
    def format(self, **kwargs: Any) -> BaseMessage:
        """Format the prompt template.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            Formatted message.
        """

    async def aformat(self, **kwargs: Any) -> BaseMessage:
        """Async format the prompt template.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            Formatted message.
        """
        return self.format(**kwargs)

    def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Format messages from kwargs.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """
        return [self.format(**kwargs)]

    async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Async format messages from kwargs.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """
        return [await self.aformat(**kwargs)]

    @property
    def input_variables(self) -> List[str]:
        """Input variables for this prompt template.

        Returns:
            List of input variable names.
        """
        return self.prompt.input_variables

    def pretty_repr(self, html: bool = False) -> str:
        """Human-readable representation.

        Args:
            html: Whether to format as HTML. Defaults to False.

        Returns:
            Human-readable representation.
        """
        # TODO: Handle partials
        title = self.__class__.__name__.replace("MessagePromptTemplate", " Message")
        title = get_msg_title_repr(title, bold=html)
        return f"{title}\n\n{self.prompt.pretty_repr(html=html)}"

class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate):
    """Chat message prompt template."""

    role: str
    """Role of the message."""

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "chat"]

    def format(self, **kwargs: Any) -> BaseMessage:
        """Format the prompt template.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            Formatted message.
        """
        text = self.prompt.format(**kwargs)
        return ChatMessage(
            content=text, role=self.role, additional_kwargs=self.additional_kwargs
        )

    async def aformat(self, **kwargs: Any) -> BaseMessage:
        """Async format the prompt template.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            Formatted message.
        """
        text = await self.prompt.aformat(**kwargs)
        return ChatMessage(
            content=text, role=self.role, additional_kwargs=self.additional_kwargs
        )

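
# Illustrative usage sketch (not part of the original module): ChatMessagePromptTemplate
# templates a message with an arbitrary role, which format() above carries onto the
# resulting ChatMessage. The function name and the "critic" role are hypothetical.
def _example_chat_message_role() -> None:
    from langchain_core.prompts import ChatMessagePromptTemplate

    critic = ChatMessagePromptTemplate.from_template(
        "Please critique the following answer: {answer}", role="critic"
    )
    msg = critic.format(answer="The capital of France is Lyon.")
    assert msg.role == "critic"  # content is the filled-in template string
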
_StringImageMessagePromptTemplateT = TypeVar(
    "_StringImageMessagePromptTemplateT", bound="_StringImageMessagePromptTemplate"
)


class _TextTemplateParam(TypedDict, total=False):
    text: Union[str, Dict]


class _ImageTemplateParam(TypedDict, total=False):
    image_url: Union[str, Dict]


class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
    """Human message prompt template. This is a message sent from the user."""

    prompt: Union[
        StringPromptTemplate, List[Union[StringPromptTemplate, ImagePromptTemplate]]
    ]
    """Prompt template."""

    additional_kwargs: dict = Field(default_factory=dict)
    """Additional keyword arguments to pass to the prompt template."""

    _msg_class: Type[BaseMessage]

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "chat"]

    @classmethod
    def from_template(
        cls: Type[_StringImageMessagePromptTemplateT],
        template: Union[str, List[Union[str, _TextTemplateParam, _ImageTemplateParam]]],
        template_format: str = "f-string",
        *,
        partial_variables: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> _StringImageMessagePromptTemplateT:
        """Create a class from a string template.

        Args:
            template: a template.
            template_format: format of the template. Defaults to "f-string".
            partial_variables: A dictionary of variables that can be used to partially
                fill in the template. Defaults to None.
            **kwargs: keyword arguments to pass to the constructor.

        Returns:
            A new instance of this class.

        Raises:
            ValueError: If the template is not a string or list of strings.
        """
        if isinstance(template, str):
            prompt: Union[StringPromptTemplate, List] = PromptTemplate.from_template(
                template,
                template_format=template_format,
                partial_variables=partial_variables,
            )
            return cls(prompt=prompt, **kwargs)
        elif isinstance(template, list):
            if (partial_variables is not None) and len(partial_variables) > 0:
                raise ValueError(
                    "Partial variables are not supported for list of templates."
                )
            prompt = []
            for tmpl in template:
                if isinstance(tmpl, str) or isinstance(tmpl, dict) and "text" in tmpl:
                    if isinstance(tmpl, str):
                        text: str = tmpl
                    else:
                        text = cast(_TextTemplateParam, tmpl)["text"]  # type: ignore[assignment]
                    prompt.append(
                        PromptTemplate.from_template(
                            text, template_format=template_format
                        )
                    )
                elif isinstance(tmpl, dict) and "image_url" in tmpl:
                    img_template = cast(_ImageTemplateParam, tmpl)["image_url"]
                    input_variables = []
                    if isinstance(img_template, str):
                        vars = get_template_variables(img_template, "f-string")
                        if vars:
                            if len(vars) > 1:
                                raise ValueError(
                                    "Only one format variable allowed per image"
                                    f" template.\nGot: {vars}"
                                    f"\nFrom: {tmpl}"
                                )
                            input_variables = [vars[0]]
                        img_template = {"url": img_template}
                        img_template_obj = ImagePromptTemplate(
                            input_variables=input_variables, template=img_template
                        )
                    elif isinstance(img_template, dict):
                        img_template = dict(img_template)
                        for key in ["url", "path", "detail"]:
                            if key in img_template:
                                input_variables.extend(
                                    get_template_variables(
                                        img_template[key], "f-string"
                                    )
                                )
                        img_template_obj = ImagePromptTemplate(
                            input_variables=input_variables, template=img_template
                        )
                    else:
                        raise ValueError()
                    prompt.append(img_template_obj)
                else:
                    raise ValueError()
            return cls(prompt=prompt, **kwargs)
        else:
            raise ValueError()

    @classmethod
    def from_template_file(
        cls: Type[_StringImageMessagePromptTemplateT],
        template_file: Union[str, Path],
        input_variables: List[str],
        **kwargs: Any,
    ) -> _StringImageMessagePromptTemplateT:
        """Create a class from a template file.

        Args:
            template_file: path to a template file. String or Path.
            input_variables: list of input variables.
            **kwargs: keyword arguments to pass to the constructor.

        Returns:
            A new instance of this class.
        """
        with open(str(template_file), "r") as f:
            template = f.read()
        return cls.from_template(template, input_variables=input_variables, **kwargs)

    def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Format messages from kwargs.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """
        return [self.format(**kwargs)]

    async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Async format messages from kwargs.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            List of BaseMessages.
        """
        return [await self.aformat(**kwargs)]

    @property
    def input_variables(self) -> List[str]:
        """Input variables for this prompt template.

        Returns:
            List of input variable names.
        """
        prompts = self.prompt if isinstance(self.prompt, list) else [self.prompt]
        input_variables = [iv for prompt in prompts for iv in prompt.input_variables]
        return input_variables

    def format(self, **kwargs: Any) -> BaseMessage:
        """Format the prompt template.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            Formatted message.
        """
        if isinstance(self.prompt, StringPromptTemplate):
            text = self.prompt.format(**kwargs)
            return self._msg_class(
                content=text, additional_kwargs=self.additional_kwargs
            )
        else:
            content: List = []
            for prompt in self.prompt:
                inputs = {var: kwargs[var] for var in prompt.input_variables}
                if isinstance(prompt, StringPromptTemplate):
                    formatted: Union[str, ImageURL] = prompt.format(**inputs)
                    content.append({"type": "text", "text": formatted})
                elif isinstance(prompt, ImagePromptTemplate):
                    formatted = prompt.format(**inputs)
                    content.append({"type": "image_url", "image_url": formatted})
            return self._msg_class(
                content=content, additional_kwargs=self.additional_kwargs
            )

    async def aformat(self, **kwargs: Any) -> BaseMessage:
        """Async format the prompt template.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            Formatted message.
        """
        if isinstance(self.prompt, StringPromptTemplate):
            text = await self.prompt.aformat(**kwargs)
            return self._msg_class(
                content=text, additional_kwargs=self.additional_kwargs
            )
        else:
            content: List = []
            for prompt in self.prompt:
                inputs = {var: kwargs[var] for var in prompt.input_variables}
                if isinstance(prompt, StringPromptTemplate):
                    formatted: Union[str, ImageURL] = await prompt.aformat(**inputs)
                    content.append({"type": "text", "text": formatted})
                elif isinstance(prompt, ImagePromptTemplate):
                    formatted = await prompt.aformat(**inputs)
                    content.append({"type": "image_url", "image_url": formatted})
            return self._msg_class(
                content=content, additional_kwargs=self.additional_kwargs
            )

    def pretty_repr(self, html: bool = False) -> str:
        """Human-readable representation.

        Args:
            html: Whether to format as HTML. Defaults to False.

        Returns:
            Human-readable representation.
        """
        # TODO: Handle partials
        title = self.__class__.__name__.replace("MessagePromptTemplate", " Message")
        title = get_msg_title_repr(title, bold=html)
        prompts = self.prompt if isinstance(self.prompt, list) else [self.prompt]
        prompt_reprs = "\n\n".join(prompt.pretty_repr(html=html) for prompt in prompts)
        return f"{title}\n\n{prompt_reprs}"

class HumanMessagePromptTemplate(_StringImageMessagePromptTemplate):
    """Human message prompt template. This is a message sent from the user."""

    _msg_class: Type[BaseMessage] = HumanMessage


class AIMessagePromptTemplate(_StringImageMessagePromptTemplate):
    """AI message prompt template. This is a message sent from the AI."""

    _msg_class: Type[BaseMessage] = AIMessage

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "chat"]


class SystemMessagePromptTemplate(_StringImageMessagePromptTemplate):
    """System message prompt template.

    This is a message that is not sent to the user.
    """

    _msg_class: Type[BaseMessage] = SystemMessage

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "chat"]

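
# Illustrative usage sketch (not part of the original module): because these subclasses
# inherit _StringImageMessagePromptTemplate.from_template, a HumanMessagePromptTemplate
# can be built from a list mixing text and image_url entries, and format() then returns
# a message whose content is a list of {"type": "text"} / {"type": "image_url"} blocks.
# The function name and URL are placeholders; resolution of "{image_url}" is assumed to
# follow the ImagePromptTemplate handling shown in from_template above.
def _example_multimodal_human_template() -> None:
    from langchain_core.prompts import HumanMessagePromptTemplate

    prompt = HumanMessagePromptTemplate.from_template(
        [
            {"text": "Describe this image in a {style} tone."},
            {"image_url": {"url": "{image_url}"}},
        ]
    )
    prompt.format(style="neutral", image_url="https://example.com/image.png")
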
class BaseChatPromptTemplate(BasePromptTemplate, ABC):
    """Base class for chat prompt templates."""

    @property
    def lc_attributes(self) -> Dict:
        """Return a list of attribute names that should be included in the
        serialized kwargs. These attributes must be accepted by the constructor.
        """
        return {"input_variables": self.input_variables}

    def format(self, **kwargs: Any) -> str:
        """Format the chat template into a string.

        Args:
            **kwargs: keyword arguments to use for filling in template variables
                in all the template messages in this chat template.

        Returns:
            formatted string.
        """
        return self.format_prompt(**kwargs).to_string()

    async def aformat(self, **kwargs: Any) -> str:
        """Async format the chat template into a string.

        Args:
            **kwargs: keyword arguments to use for filling in template variables
                in all the template messages in this chat template.

        Returns:
            formatted string.
        """
        return (await self.aformat_prompt(**kwargs)).to_string()

    def format_prompt(self, **kwargs: Any) -> PromptValue:
        """Format prompt. Should return a PromptValue.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            PromptValue.
        """
        messages = self.format_messages(**kwargs)
        return ChatPromptValue(messages=messages)

    async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
        """Async format prompt. Should return a PromptValue.

        Args:
            **kwargs: Keyword arguments to use for formatting.

        Returns:
            PromptValue.
        """
        messages = await self.aformat_messages(**kwargs)
        return ChatPromptValue(messages=messages)

    @abstractmethod
    def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Format kwargs into a list of messages."""

    async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Async format kwargs into a list of messages."""
        return self.format_messages(**kwargs)

    def pretty_repr(self, html: bool = False) -> str:
        """Human-readable representation.

        Args:
            html: Whether to format as HTML. Defaults to False.

        Returns:
            Human-readable representation.
        """
        raise NotImplementedError

    def pretty_print(self) -> None:
        """Print a human-readable representation."""
        print(self.pretty_repr(html=is_interactive_env()))  # noqa: T201

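
# Illustrative usage sketch (not part of the original module): the entry points above
# are layered -- format_messages() produces BaseMessages, format_prompt() wraps them in
# a ChatPromptValue, and format() stringifies that value -- demonstrated here with the
# concrete ChatPromptTemplate defined below. The function name is hypothetical.
def _example_base_chat_prompt_entry_points() -> None:
    from langchain_core.prompts import ChatPromptTemplate

    template = ChatPromptTemplate.from_messages(
        [("system", "You are terse."), ("human", "{question}")]
    )
    messages = template.format_messages(question="Why is the sky blue?")
    prompt_value = template.format_prompt(question="Why is the sky blue?")
    text = template.format(question="Why is the sky blue?")
    assert prompt_value.to_messages() == messages
    assert prompt_value.to_string() == text
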
class ChatPromptTemplate(BaseChatPromptTemplate):
    """Prompt template for chat models.

    Use to create flexible templated prompts for chat models.

    Examples:

        .. versionchanged:: 0.2.24

            You can pass any Message-like formats supported by
            ``ChatPromptTemplate.from_messages()`` directly to ``ChatPromptTemplate()``
            init.

        .. code-block:: python

            from langchain_core.prompts import ChatPromptTemplate

            template = ChatPromptTemplate([
                ("system", "You are a helpful AI bot. Your name is {name}."),
                ("human", "Hello, how are you doing?"),
                ("ai", "I'm doing well, thanks!"),
                ("human", "{user_input}"),
            ])

            prompt_value = template.invoke(
                {
                    "name": "Bob",
                    "user_input": "What is your name?"
                }
            )
            # Output:
            # ChatPromptValue(
            #    messages=[
            #        SystemMessage(content='You are a helpful AI bot. Your name is Bob.'),
            #        HumanMessage(content='Hello, how are you doing?'),
            #        AIMessage(content="I'm doing well, thanks!"),
            #        HumanMessage(content='What is your name?')
            #    ]
            #)

    Messages Placeholder:

        .. code-block:: python

            # In addition to Human/AI/Tool/Function messages,
            # you can initialize the template with a MessagesPlaceholder
            # either using the class directly or with the shorthand tuple syntax:

            template = ChatPromptTemplate([
                ("system", "You are a helpful AI bot."),
                # Means the template will receive an optional list of messages under
                # the "conversation" key
                ("placeholder", "{conversation}")
                # Equivalently:
                # MessagesPlaceholder(variable_name="conversation", optional=True)
            ])

            prompt_value = template.invoke(
                {
                    "conversation": [
                        ("human", "Hi!"),
                        ("ai", "How can I assist you today?"),
                        ("human", "Can you make me an ice cream sundae?"),
                        ("ai", "No.")
                    ]
                }
            )

            # Output:
            # ChatPromptValue(
            #    messages=[
            #        SystemMessage(content='You are a helpful AI bot.'),
            #        HumanMessage(content='Hi!'),
            #        AIMessage(content='How can I assist you today?'),
            #        HumanMessage(content='Can you make me an ice cream sundae?'),
            #        AIMessage(content='No.'),
            #    ]
            #)

    Single-variable template:

        If your prompt has only a single input variable (i.e., 1 instance of
        "{variable_name}"), and you invoke the template with a non-dict object, the
        prompt template will inject the provided argument into that variable location.

        .. code-block:: python

            from langchain_core.prompts import ChatPromptTemplate

            template = ChatPromptTemplate([
                ("system", "You are a helpful AI bot. Your name is Carl."),
                ("human", "{user_input}"),
            ])

            prompt_value = template.invoke("Hello, there!")
            # Equivalent to
            # prompt_value = template.invoke({"user_input": "Hello, there!"})

            # Output:
            #  ChatPromptValue(
            #     messages=[
            #         SystemMessage(content='You are a helpful AI bot. Your name is Carl.'),
            #         HumanMessage(content='Hello, there!'),
            #     ]
            # )

    """  # noqa: E501

    messages: List[MessageLike]
    """List of messages consisting of either message prompt templates or messages."""

    validate_template: bool = False
    """Whether or not to try validating the template."""

    def __init__(
        self,
        messages: Sequence[MessageLikeRepresentation],
        *,
        template_format: Literal["f-string", "mustache", "jinja2"] = "f-string",
        **kwargs: Any,
    ) -> None:
        """Create a chat prompt template from a variety of message formats.

        Args:
            messages: sequence of message representations.
                A message can be represented using the following formats:
                (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
                (message type, template); e.g., ("human", "{user_input}"),
                (4) 2-tuple of (message class, template), (5) a string which is
                shorthand for ("human", template); e.g., "{user_input}".
            template_format: format of the template. Defaults to "f-string".
            input_variables: A list of the names of the variables whose values are
                required as inputs to the prompt.
            optional_variables: A list of the names of the variables for placeholder
                or MessagePlaceholder that are optional. These variables are auto
                inferred from the prompt and user need not provide them.
            partial_variables: A dictionary of the partial variables the prompt
                template carries. Partial variables populate the template so that you
                don't need to pass them in every time you call the prompt.
            validate_template: Whether to validate the template.
            input_types: A dictionary of the types of the variables the prompt template
                expects. If not provided, all variables are assumed to be strings.

        Returns:
            A chat prompt template.

        Examples:

            Instantiation from a list of message templates:

            .. code-block:: python

                template = ChatPromptTemplate([
                    ("human", "Hello, how are you?"),
                    ("ai", "I'm doing well, thanks!"),
                    ("human", "That's good to hear."),
                ])

            Instantiation from mixed message formats:

            .. code-block:: python

                template = ChatPromptTemplate([
                    SystemMessage(content="hello"),
                    ("human", "Hello, how are you?"),
                ])

        """
        _messages = [
            _convert_to_message(message, template_format) for message in messages
        ]

        # Automatically infer input variables from messages
        input_vars: Set[str] = set()
        optional_variables: Set[str] = set()
        partial_vars: Dict[str, Any] = {}
        for _message in _messages:
            if isinstance(_message, MessagesPlaceholder) and _message.optional:
                partial_vars[_message.variable_name] = []
                optional_variables.add(_message.variable_name)
            elif isinstance(
                _message, (BaseChatPromptTemplate, BaseMessagePromptTemplate)
            ):
                input_vars.update(_message.input_variables)

        kwargs = {
            **dict(
                input_variables=sorted(input_vars),
                optional_variables=sorted(optional_variables),
                partial_variables=partial_vars,
            ),
            **kwargs,
        }
        cast(Type[ChatPromptTemplate], super()).__init__(messages=_messages, **kwargs)

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "chat"]

    def __add__(self, other: Any) -> ChatPromptTemplate:
        """Combine two prompt templates.

        Args:
            other: Another prompt template.

        Returns:
            Combined prompt template.
        """
        # Allow for easy combining
        if isinstance(other, ChatPromptTemplate):
            return ChatPromptTemplate(messages=self.messages + other.messages)  # type: ignore[call-arg]
        elif isinstance(
            other, (BaseMessagePromptTemplate, BaseMessage, BaseChatPromptTemplate)
        ):
            return ChatPromptTemplate(messages=self.messages + [other])  # type: ignore[call-arg]
        elif isinstance(other, (list, tuple)):
            _other = ChatPromptTemplate.from_messages(other)
            return ChatPromptTemplate(messages=self.messages + _other.messages)  # type: ignore[call-arg]
        elif isinstance(other, str):
            prompt = HumanMessagePromptTemplate.from_template(other)
            return ChatPromptTemplate(messages=self.messages + [prompt])  # type: ignore[call-arg]
        else:
            raise NotImplementedError(f"Unsupported operand type for +: {type(other)}")

    @root_validator(pre=True)
    def validate_input_variables(cls, values: dict) -> dict:
        """Validate input variables.

        If input_variables is not set, it will be set to the union of
        all input variables in the messages.

        Args:
            values: values to validate.

        Returns:
            Validated values.

        Raises:
            ValueError: If input variables do not match.
        """
        messages = values["messages"]
        input_vars = set()
        optional_variables = set()
        input_types: Dict[str, Any] = values.get("input_types", {})
        for message in messages:
            if isinstance(
                message, (BaseMessagePromptTemplate, BaseChatPromptTemplate)
            ):
                input_vars.update(message.input_variables)
            if isinstance(message, MessagesPlaceholder):
                if "partial_variables" not in values:
                    values["partial_variables"] = {}
                if (
                    message.optional
                    and message.variable_name not in values["partial_variables"]
                ):
                    values["partial_variables"][message.variable_name] = []
                    optional_variables.add(message.variable_name)
                if message.variable_name not in input_types:
                    input_types[message.variable_name] = List[AnyMessage]
        if "partial_variables" in values:
            input_vars = input_vars - set(values["partial_variables"])
        if optional_variables:
            input_vars = input_vars - optional_variables
        if "input_variables" in values and values.get("validate_template"):
            if input_vars != set(values["input_variables"]):
                raise ValueError(
                    "Got mismatched input_variables. "
                    f"Expected: {input_vars}. "
                    f"Got: {values['input_variables']}"
                )
        else:
            values["input_variables"] = sorted(input_vars)
        if optional_variables:
            values["optional_variables"] = sorted(optional_variables)
        values["input_types"] = input_types
        return values
    @classmethod
    def from_template(cls, template: str, **kwargs: Any) -> ChatPromptTemplate:
        """Create a chat prompt template from a template string.

        Creates a chat template consisting of a single message assumed to be from
        the human.

        Args:
            template: template string
            **kwargs: keyword arguments to pass to the constructor.

        Returns:
            A new instance of this class.
        """
        prompt_template = PromptTemplate.from_template(template, **kwargs)
        message = HumanMessagePromptTemplate(prompt=prompt_template)
        return cls.from_messages([message])
    @classmethod
    @deprecated("0.0.1", alternative="from_messages classmethod", pending=True)
    def from_role_strings(
        cls, string_messages: List[Tuple[str, str]]
    ) -> ChatPromptTemplate:
        """Create a chat prompt template from a list of (role, template) tuples.

        Args:
            string_messages: list of (role, template) tuples.

        Returns:
            a chat prompt template.
        """
        return cls(  # type: ignore[call-arg]
            messages=[
                ChatMessagePromptTemplate.from_template(template, role=role)
                for role, template in string_messages
            ]
        )

    @classmethod
    @deprecated("0.0.1", alternative="from_messages classmethod", pending=True)
    def from_strings(
        cls, string_messages: List[Tuple[Type[BaseMessagePromptTemplate], str]]
    ) -> ChatPromptTemplate:
        """Create a chat prompt template from a list of (role class, template) tuples.

        Args:
            string_messages: list of (role class, template) tuples.

        Returns:
            a chat prompt template.
        """
        return cls.from_messages(string_messages)
    @classmethod
    def from_messages(
        cls,
        messages: Sequence[MessageLikeRepresentation],
        template_format: Literal["f-string", "mustache", "jinja2"] = "f-string",
    ) -> ChatPromptTemplate:
        """Create a chat prompt template from a variety of message formats.

        Examples:

            Instantiation from a list of message templates:

            .. code-block:: python

                template = ChatPromptTemplate.from_messages([
                    ("human", "Hello, how are you?"),
                    ("ai", "I'm doing well, thanks!"),
                    ("human", "That's good to hear."),
                ])

            Instantiation from mixed message formats:

            .. code-block:: python

                template = ChatPromptTemplate.from_messages([
                    SystemMessage(content="hello"),
                    ("human", "Hello, how are you?"),
                ])

        Args:
            messages: sequence of message representations.
                A message can be represented using the following formats:
                (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
                (message type, template); e.g., ("human", "{user_input}"),
                (4) 2-tuple of (message class, template), (5) a string which is
                shorthand for ("human", template); e.g., "{user_input}".
            template_format: format of the template. Defaults to "f-string".

        Returns:
            a chat prompt template.
        """
        return cls(messages, template_format=template_format)
    def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Format the chat template into a list of finalized messages.

        Args:
            **kwargs: keyword arguments to use for filling in template variables
                in all the template messages in this chat template.

        Returns:
            list of formatted messages.
        """
        kwargs = self._merge_partial_and_user_variables(**kwargs)
        result = []
        for message_template in self.messages:
            if isinstance(message_template, BaseMessage):
                result.extend([message_template])
            elif isinstance(
                message_template, (BaseMessagePromptTemplate, BaseChatPromptTemplate)
            ):
                message = message_template.format_messages(**kwargs)
                result.extend(message)
            else:
                raise ValueError(f"Unexpected input: {message_template}")
        return result

    async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
        """Async format the chat template into a list of finalized messages.

        Args:
            **kwargs: keyword arguments to use for filling in template variables
                in all the template messages in this chat template.

        Returns:
            list of formatted messages.

        Raises:
            ValueError: If unexpected input.
        """
        kwargs = self._merge_partial_and_user_variables(**kwargs)
        result = []
        for message_template in self.messages:
            if isinstance(message_template, BaseMessage):
                result.extend([message_template])
            elif isinstance(
                message_template, (BaseMessagePromptTemplate, BaseChatPromptTemplate)
            ):
                message = await message_template.aformat_messages(**kwargs)
                result.extend(message)
            else:
                raise ValueError(f"Unexpected input: {message_template}")
        return result
    def partial(self, **kwargs: Any) -> ChatPromptTemplate:
        """Get a new ChatPromptTemplate with some input variables already filled in.

        Args:
            **kwargs: keyword arguments to use for filling in template variables. Ought
                to be a subset of the input variables.

        Returns:
            A new ChatPromptTemplate.

        Example:

            .. code-block:: python

                from langchain_core.prompts import ChatPromptTemplate

                template = ChatPromptTemplate.from_messages(
                    [
                        ("system", "You are an AI assistant named {name}."),
                        ("human", "Hi I'm {user}"),
                        ("ai", "Hi there, {user}, I'm {name}."),
                        ("human", "{input}"),
                    ]
                )
                template2 = template.partial(user="Lucy", name="R2D2")

                template2.format_messages(input="hello")
        """
        prompt_dict = self.__dict__.copy()
        prompt_dict["input_variables"] = list(
            set(self.input_variables).difference(kwargs)
        )
        prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs}
        return type(self)(**prompt_dict)
    def append(self, message: MessageLikeRepresentation) -> None:
        """Append a message to the end of the chat template.

        Args:
            message: representation of a message to append.
        """
        self.messages.append(_convert_to_message(message))

    def extend(self, messages: Sequence[MessageLikeRepresentation]) -> None:
        """Extend the chat template with a sequence of messages.

        Args:
            messages: sequence of message representations to append.
        """
        self.messages.extend([_convert_to_message(message) for message in messages])
    @overload
    def __getitem__(self, index: int) -> MessageLike: ...

    @overload
    def __getitem__(self, index: slice) -> ChatPromptTemplate: ...

    def __getitem__(
        self, index: Union[int, slice]
    ) -> Union[MessageLike, ChatPromptTemplate]:
        """Use to index into the chat template."""
        if isinstance(index, slice):
            start, stop, step = index.indices(len(self.messages))
            messages = self.messages[start:stop:step]
            return ChatPromptTemplate.from_messages(messages)
        else:
            return self.messages[index]

    def __len__(self) -> int:
        """Get the length of the chat template."""
        return len(self.messages)

    @property
    def _prompt_type(self) -> str:
        """Name of prompt type. Used for serialization."""
        return "chat"
    def save(self, file_path: Union[Path, str]) -> None:
        """Save prompt to file.

        Args:
            file_path: path to file.
        """
        raise NotImplementedError()

    def pretty_repr(self, html: bool = False) -> str:
        """Human-readable representation.

        Args:
            html: Whether to format as HTML. Defaults to False.

        Returns:
            Human-readable representation.
        """
        # TODO: handle partials
        return "\n\n".join(msg.pretty_repr(html=html) for msg in self.messages)

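
# Illustrative usage sketch (not part of the original module): ChatPromptTemplate
# supports partial variables, list-style indexing/slicing, and in-place append/extend,
# as defined in the methods above. The function name is hypothetical.
def _example_chat_prompt_template_utilities() -> None:
    from langchain_core.prompts import ChatPromptTemplate

    template = ChatPromptTemplate.from_messages(
        [
            ("system", "You are an assistant named {name}."),
            ("human", "{question}"),
        ]
    )
    named = template.partial(name="R2D2")  # "name" no longer required at invoke time
    named.append(("ai", "Let me think about {question}."))
    first_two = named[:2]  # slicing returns a new ChatPromptTemplate
    assert len(first_two) == 2
    named.format_messages(question="What is 2 + 2?")
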
def _create_template_from_message_type(
    message_type: str,
    template: Union[str, list],
    template_format: Literal["f-string", "mustache", "jinja2"] = "f-string",
) -> BaseMessagePromptTemplate:
    """Create a message prompt template from a message type and template string.

    Args:
        message_type: the type of the message template (e.g., "human", "ai", etc.)
        template: the template string.
        template_format: format of the template. Defaults to "f-string".

    Returns:
        a message prompt template of the appropriate type.

    Raises:
        ValueError: If unexpected message type.
    """
    if message_type in ("human", "user"):
        message: BaseMessagePromptTemplate = HumanMessagePromptTemplate.from_template(
            template, template_format=template_format
        )
    elif message_type in ("ai", "assistant"):
        message = AIMessagePromptTemplate.from_template(
            cast(str, template), template_format=template_format
        )
    elif message_type == "system":
        message = SystemMessagePromptTemplate.from_template(
            cast(str, template), template_format=template_format
        )
    elif message_type == "placeholder":
        if isinstance(template, str):
            if template[0] != "{" or template[-1] != "}":
                raise ValueError(
                    f"Invalid placeholder template: {template}."
                    " Expected a variable name surrounded by curly braces."
                )
            var_name = template[1:-1]
            message = MessagesPlaceholder(variable_name=var_name, optional=True)
        elif len(template) == 2 and isinstance(template[1], bool):
            var_name_wrapped, is_optional = template
            if not isinstance(var_name_wrapped, str):
                raise ValueError(
                    "Expected variable name to be a string."
                    f" Got: {var_name_wrapped}"
                )
            if var_name_wrapped[0] != "{" or var_name_wrapped[-1] != "}":
                raise ValueError(
                    f"Invalid placeholder template: {var_name_wrapped}."
                    " Expected a variable name surrounded by curly braces."
                )
            var_name = var_name_wrapped[1:-1]
            message = MessagesPlaceholder(variable_name=var_name, optional=is_optional)
        else:
            raise ValueError(
                "Unexpected arguments for placeholder message type."
                " Expected either a single string variable name"
                " or a list of [variable_name: str, is_optional: bool]."
                f" Got: {template}"
            )
    else:
        raise ValueError(
            f"Unexpected message type: {message_type}. Use one of 'human',"
            f" 'user', 'ai', 'assistant', or 'system'."
        )
    return message


def _convert_to_message(
    message: MessageLikeRepresentation,
    template_format: Literal["f-string", "mustache", "jinja2"] = "f-string",
) -> Union[BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate]:
    """Instantiate a message from a variety of message formats.

    The message format can be one of the following:

    - BaseMessagePromptTemplate
    - BaseMessage
    - 2-tuple of (role string, template); e.g., ("human", "{user_input}")
    - 2-tuple of (message class, template)
    - string: shorthand for ("human", template); e.g., "{user_input}"

    Args:
        message: a representation of a message in one of the supported formats.
        template_format: format of the template. Defaults to "f-string".

    Returns:
        an instance of a message or a message template.

    Raises:
        ValueError: If unexpected message type.
        ValueError: If 2-tuple does not have 2 elements.
    """
    if isinstance(message, (BaseMessagePromptTemplate, BaseChatPromptTemplate)):
        _message: Union[
            BaseMessage, BaseMessagePromptTemplate, BaseChatPromptTemplate
        ] = message
    elif isinstance(message, BaseMessage):
        _message = message
    elif isinstance(message, str):
        _message = _create_template_from_message_type(
            "human", message, template_format=template_format
        )
    elif isinstance(message, tuple):
        if len(message) != 2:
            raise ValueError(f"Expected 2-tuple of (role, template), got {message}")
        message_type_str, template = message
        if isinstance(message_type_str, str):
            _message = _create_template_from_message_type(
                message_type_str, template, template_format=template_format
            )
        else:
            _message = message_type_str(
                prompt=PromptTemplate.from_template(
                    cast(str, template), template_format=template_format
                )
            )
    else:
        raise NotImplementedError(f"Unsupported message type: {type(message)}")

    return _message