Prompts

A prompt is the input provided to a language model: your data, structured in a way the model can understand.

A prompt template offers a reproducible method for generating prompts.

Prompt

This component creates a prompt template with dynamic variables, which is useful for structuring prompts and passing dynamic data to a language model.

Parameters

Inputs

| Name | Display Name | Info |
|------|--------------|------|
| template | Template | The prompt template with dynamic variables |

Outputs

| Name | Display Name | Info |
|------|--------------|------|
| prompt | Prompt Message | The built prompt message |

Prompt template field

The template field lets you create other fields dynamically by using curly brackets. For example, the template `Hello {name}, how are you?` creates a new field called `name`. Prompt variables can use any name inside curly brackets, for example `{variable_name}`.
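
As a rough illustration of the placeholder syntax (the component itself parses the template with `process_prompt_template`, shown in the code below), the following sketch extracts curly-bracket names from a template string with a regular expression and fills them in with Python's `str.format`. The template text and the value `"Ada"` are made up for the example.

```python
import re

template = "Hello {name}, how are you?"

# Find every {variable} placeholder; each one becomes a new input field.
variables = re.findall(r"\{(.*?)\}", template)
print(variables)  # ['name']

# Filling the placeholders produces the final prompt text.
print(template.format(name="Ada"))  # Hello Ada, how are you?
```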

Component code

prompt.py
from langflow.base.prompts.api_utils import process_prompt_template
from langflow.custom import Component
from langflow.inputs.inputs import DefaultPromptField
from langflow.io import Output, PromptInput
from langflow.schema.message import Message
from langflow.template.utils import update_template_values


class PromptComponent(Component):
    display_name: str = "Prompt"
    description: str = "Create a prompt template with dynamic variables."
    icon = "prompts"
    trace_type = "prompt"
    name = "Prompt"

    inputs = [
        PromptInput(name="template", display_name="Template"),
    ]

    outputs = [
        Output(display_name="Prompt Message", name="prompt", method="build_prompt"),
    ]

    async def build_prompt(self) -> Message:
        prompt = Message.from_template(**self._attributes)
        self.status = prompt.text
        return prompt

    def _update_template(self, frontend_node: dict):
        prompt_template = frontend_node["template"]["template"]["value"]
        custom_fields = frontend_node["custom_fields"]
        frontend_node_template = frontend_node["template"]
        _ = process_prompt_template(
            template=prompt_template,
            name="template",
            custom_fields=custom_fields,
            frontend_node_template=frontend_node_template,
        )
        return frontend_node

    def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):
        """This function is called after the code validation is done."""
        frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)
        template = frontend_node["template"]["template"]["value"]
        # Kept it duplicated for backwards compatibility
        _ = process_prompt_template(
            template=template,
            name="template",
            custom_fields=frontend_node["custom_fields"],
            frontend_node_template=frontend_node["template"],
        )
        # Now that template is updated, we need to grab any values that were set in the current_frontend_node
        # and update the frontend_node with those values
        update_template_values(new_template=frontend_node, previous_template=current_frontend_node["template"])
        return frontend_node

    def _get_fallback_input(self, **kwargs):
        return DefaultPromptField(**kwargs)
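
For reference, `build_prompt` passes the component's attributes (the template plus any values entered in the dynamic fields) to `Message.from_template`. A minimal sketch of that call, assuming a Langflow environment and assuming `Message.from_template` accepts the template string and variable values as keyword arguments, as the component's usage suggests; the attribute values are hypothetical.

```python
from langflow.schema.message import Message

# Hypothetical attribute values: the template plus one dynamic field.
attributes = {"template": "Hello {name}, how are you?", "name": "Ada"}

# Mirrors Message.from_template(**self._attributes) in build_prompt.
message = Message.from_template(**attributes)
print(message.text)  # Hello Ada, how are you?
```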

LangChain Hub prompt

This component allows you to use prompts from LangChain Hub in your flow.

Parameters

Inputs

| Name | Type | Description |
|------|------|-------------|
| langchain_api_key | SecretString | Your LangChain API key for authentication |
| langchain_hub_prompt | String | The LangChain Hub prompt to use (e.g., "efriis/my-first-prompt") |

Outputs

| Name | Type | Description |
|------|------|-------------|
| prompt | Message | The built prompt from LangChain Hub |

Component code

langchain_hub.py
import re

from langchain_core.prompts import HumanMessagePromptTemplate

from langflow.custom import Component
from langflow.inputs import DefaultPromptField, SecretStrInput, StrInput
from langflow.io import Output
from langflow.schema.message import Message


class LangChainHubPromptComponent(Component):
    display_name: str = "Prompt Hub"
    description: str = "Prompt Component that uses LangChain Hub prompts"
    beta = True
    icon = "LangChain"
    trace_type = "prompt"
    name = "LangChain Hub Prompt"

    inputs = [
        SecretStrInput(
            name="langchain_api_key",
            display_name="Your LangChain API Key",
            info="The LangChain API Key to use.",
            required=True,
        ),
        StrInput(
            name="langchain_hub_prompt",
            display_name="LangChain Hub Prompt",
            info="The LangChain Hub prompt to use, i.e., 'efriis/my-first-prompt'",
            refresh_button=True,
            required=True,
        ),
    ]

    outputs = [
        Output(display_name="Build Prompt", name="prompt", method="build_prompt"),
    ]

    def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
        # If the field is not langchain_hub_prompt or the value is empty, return the build config as is
        if field_name != "langchain_hub_prompt" or not field_value:
            return build_config

        # Fetch the template
        template = self._fetch_langchain_hub_template()

        # Get the template's messages
        if hasattr(template, "messages"):
            template_messages = template.messages
        else:
            template_messages = [HumanMessagePromptTemplate(prompt=template)]

        # Extract the messages from the prompt data
        prompt_template = [message_data.prompt for message_data in template_messages]

        # Regular expression to find all instances of {<string>}
        pattern = r"\{(.*?)\}"

        # Get all the custom fields
        custom_fields: list[str] = []
        full_template = ""
        for message in prompt_template:
            # Find all matches
            matches = re.findall(pattern, message.template)
            custom_fields += matches

            # Create a string version of the full template
            full_template = full_template + "\n" + message.template

        # No need to reprocess if we have them already
        if all("param_" + custom_field in build_config for custom_field in custom_fields):
            return build_config

        # Easter egg: Show template in info popup
        build_config["langchain_hub_prompt"]["info"] = full_template

        # Remove old parameter inputs if any
        for key in build_config.copy():
            if key.startswith("param_"):
                del build_config[key]

        # Now create inputs for each
        for custom_field in custom_fields:
            new_parameter = DefaultPromptField(
                name=f"param_{custom_field}",
                display_name=custom_field,
                info="Fill in the value for {" + custom_field + "}",
            ).to_dict()

            # Add the new parameter to the build config
            build_config[f"param_{custom_field}"] = new_parameter

        return build_config

    async def build_prompt(
        self,
    ) -> Message:
        # Fetch the template
        template = self._fetch_langchain_hub_template()

        # Get the parameters from the attributes
        params_dict = {param: getattr(self, "param_" + param, f"{{{param}}}") for param in template.input_variables}
        original_params = {k: v.text if hasattr(v, "text") else v for k, v in params_dict.items() if v is not None}
        prompt_value = template.invoke(original_params)

        # Update the template with the new value
        original_params["template"] = prompt_value.to_string()

        # Now pass the filtered attributes to the function
        prompt = Message.from_template(**original_params)

        self.status = prompt.text

        return prompt

    def _fetch_langchain_hub_template(self):
        import langchain.hub

        # Check if the api key is provided
        if not self.langchain_api_key:
            msg = "Please provide a LangChain API Key"

            raise ValueError(msg)

        # Pull the prompt from LangChain Hub
        return langchain.hub.pull(self.langchain_hub_prompt, api_key=self.langchain_api_key)
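
Outside of a flow, the same fetch-and-fill sequence the component performs can be sketched with `langchain.hub.pull` directly, as the component's `_fetch_langchain_hub_template` and `build_prompt` methods do. The sketch below assumes a LangChain API key is available in the `LANGCHAIN_API_KEY` environment variable and uses the docs' example prompt name; the fill-in values are hypothetical.

```python
import os

import langchain.hub

# Example prompt name from the docs; replace with your own LangChain Hub entry.
prompt_name = "efriis/my-first-prompt"

# Pull the prompt template, authenticating the same way the component does.
template = langchain.hub.pull(prompt_name, api_key=os.environ["LANGCHAIN_API_KEY"])

# Fill the template's input variables with placeholder values, then render it.
params = {variable: f"example value for {variable}" for variable in template.input_variables}
prompt_value = template.invoke(params)
print(prompt_value.to_string())
```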
