Prompts

A prompt is the input provided to a language model: your input data, structured in a way that the model can understand.

A prompt template offers a reproducible method for generating prompts.

Prompt

This component creates a prompt template with dynamic variables. This is useful for structuring prompts and passing dynamic data to a language model.

Parameters

Inputs

| Name | Display Name | Info |
|------|--------------|------|
| template | Template | The prompt template with dynamic variables |

Outputs

| Name | Display Name | Info |
|------|--------------|------|
| prompt | Prompt Message | The built prompt message |

Prompt template field

The template field allows you to create other fields dynamically by using curly brackets. For example, if you have a template like Hello {name}, how are you?, a new field called name is created. Prompt variables can be created with any name inside curly brackets, e.g. {variable_name}.

Component code

Prompt.py
from langflow.base.prompts.api_utils import process_prompt_template
from langflow.custom import Component
from langflow.inputs.inputs import DefaultPromptField
from langflow.io import Output, PromptInput
from langflow.schema.message import Message
from langflow.template.utils import update_template_values


class PromptComponent(Component):
    """Component that renders a prompt template with dynamic variables into a Message."""

    display_name: str = "Prompt"
    description: str = "Create a prompt template with dynamic variables."
    icon = "prompts"
    trace_type = "prompt"
    name = "Prompt"

    inputs = [
        PromptInput(name="template", display_name="Template"),
    ]

    outputs = [
        Output(display_name="Prompt Message", name="prompt", method="build_prompt"),
    ]

    async def build_prompt(self) -> Message:
        """Render the template using this component's attribute values.

        Returns:
            Message: the built prompt message; its text is also mirrored to
            ``self.status`` for display in the UI.
        """
        message = await Message.from_template_and_variables(**self._attributes)
        self.status = message.text
        return message

    def _update_template(self, frontend_node: dict):
        """Regenerate the dynamic variable fields of *frontend_node* from its template text."""
        node_template = frontend_node["template"]
        process_prompt_template(
            template=node_template["template"]["value"],
            name="template",
            custom_fields=frontend_node["custom_fields"],
            frontend_node_template=node_template,
        )
        return frontend_node

    def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):
        """Hook called after code validation completes.

        Rebuilds the template's dynamic fields and then restores any values the
        user had already set on the current frontend node.
        """
        frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)
        # Kept duplicated with _update_template for backwards compatibility.
        process_prompt_template(
            template=frontend_node["template"]["template"]["value"],
            name="template",
            custom_fields=frontend_node["custom_fields"],
            frontend_node_template=frontend_node["template"],
        )
        # With the template fields rebuilt, carry over the values that were
        # previously set on the current frontend node.
        update_template_values(new_template=frontend_node, previous_template=current_frontend_node["template"])
        return frontend_node

    def _get_fallback_input(self, **kwargs):
        """Fall back to a generic prompt field for dynamically created variables."""
        return DefaultPromptField(**kwargs)

Langchain Hub prompt

This component allows you to use prompts from LangChain Hub in your flow.

Parameters

Inputs

| Name | Type | Description |
|------|------|-------------|
| langchain_api_key | SecretString | Your LangChain API key for authentication |
| langchain_hub_prompt | String | The LangChain Hub prompt to use (e.g., "efriis/my-first-prompt") |

Outputs

| Name | Type | Description |
|------|------|-------------|
| prompt | Message | The built prompt from LangChain Hub |

Component code

LangChainHubPrompt.py
from typing import List

from langflow.custom import Component
from langflow.inputs import StrInput, SecretStrInput, DefaultPromptField
from langflow.io import Output
from langflow.schema.message import Message


import re


class LangChainHubPromptComponent(Component):
    """Prompt component that pulls and renders prompts from LangChain Hub."""

    display_name: str = "LangChain Hub"
    description: str = "Prompt Component that uses LangChain Hub prompts"
    beta = True
    icon = "prompts"
    trace_type = "prompt"
    name = "LangChain Hub Prompt"

    inputs = [
        SecretStrInput(
            name="langchain_api_key",
            display_name="Your LangChain API Key",
            info="The LangChain API Key to use.",
        ),
        StrInput(
            name="langchain_hub_prompt",
            display_name="LangChain Hub Prompt",
            info="The LangChain Hub prompt to use.",
            value="efriis/my-first-prompt",
            refresh_button=True,
        ),
    ]

    outputs = [
        Output(display_name="Build Prompt", name="prompt", method="build_prompt"),
    ]

    def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
        """Refresh the dynamic ``param_*`` inputs when the hub prompt changes.

        Pulls the selected prompt from LangChain Hub, extracts every
        ``{variable}`` placeholder from its messages, and creates one input
        field per variable (key prefixed with ``param_``). Fields left over
        from a previously selected prompt are removed.
        """
        if field_name == "langchain_hub_prompt":
            template = self._fetch_langchain_hub_template()

            # Messages of the pulled chat prompt; each carries a .template string.
            message_prompts = [message_data.prompt for message_data in template.messages]

            # Collect all {placeholder} occurrences across every message and
            # build a single string version of the full template.
            pattern = r"\{(.*?)\}"
            custom_fields: list[str] = []
            full_template = ""
            for prompt in message_prompts:
                custom_fields.extend(re.findall(pattern, prompt.template))
                full_template = full_template + "\n" + prompt.template

            # Deduplicate placeholders while preserving first-seen order, so a
            # variable used in several messages yields a single input field.
            custom_fields = list(dict.fromkeys(custom_fields))

            # Skip the rebuild only when the existing param_* fields match the
            # new set exactly. (A plain subset check would leave stale fields
            # behind when switching to a prompt with fewer variables.)
            expected_keys = {f"param_{field}" for field in custom_fields}
            existing_keys = {key for key in build_config if key.startswith("param_")}
            if expected_keys == existing_keys:
                return build_config

            # Easter egg: show the full template in the info popup.
            build_config["langchain_hub_prompt"]["info"] = full_template

            # Remove parameter inputs belonging to a previously selected prompt.
            for key in list(build_config):
                if key.startswith("param_"):
                    del build_config[key]

            # Create one input per template variable.
            for custom_field in custom_fields:
                build_config[f"param_{custom_field}"] = DefaultPromptField(
                    name=f"param_{custom_field}",
                    display_name=custom_field,
                    info="Fill in the value for {" + custom_field + "}",
                ).to_dict()

        return build_config

    async def build_prompt(self) -> Message:
        """Render the hub prompt with the current ``param_*`` values into a Message."""
        # TODO: the template is pulled here and again in update_build_config; cache it.
        template = self._fetch_langchain_hub_template()

        # Strip the "param_" prefix so attribute names match the template variables.
        original_params = {k[6:] if k.startswith("param_") else k: v for k, v in self._attributes.items()}
        prompt_value = template.invoke(original_params)

        # Pass the fully rendered text through the standard Message factory.
        original_params["template"] = prompt_value.to_string()
        prompt = await Message.from_template_and_variables(**original_params)

        self.status = prompt.text
        return prompt

    def _fetch_langchain_hub_template(self):
        """Pull the configured prompt object from LangChain Hub."""
        import langchain.hub

        return langchain.hub.pull(self.langchain_hub_prompt, api_key=self.langchain_api_key)

Was this helpful?

Give Feedback

How can we improve the documentation?

© 2024 DataStax | Privacy policy | Terms of use

Apache, Apache Cassandra, Cassandra, Apache Tomcat, Tomcat, Apache Lucene, Apache Solr, Apache Hadoop, Hadoop, Apache Pulsar, Pulsar, Apache Spark, Spark, Apache TinkerPop, TinkerPop, Apache Kafka and Kafka are either registered trademarks or trademarks of the Apache Software Foundation or its subsidiaries in Canada, the United States and/or other countries. Kubernetes is the registered trademark of the Linux Foundation.

General Inquiries: +1 (650) 389-6000, info@datastax.com