Helpers
Helper components provide utility functions to help manage data, tasks, and other components in your flow.
Create List
This component takes a list of text inputs and converts each text into a data object. These data objects are then collected into a list, which is returned as the output.
Parameters
Inputs

Name | Display Name | Info |
---|---|---|
texts | Texts | Enter one or more texts. This input accepts multiple text entries. |

Outputs

Name | Display Name | Info |
---|---|---|
list | Data List | A list of data objects created from the input texts. |
Component code
create_list.py
from langflow.custom import Component
from langflow.inputs import StrInput
from langflow.schema import Data
from langflow.template import Output


class CreateListComponent(Component):
    display_name = "Create List"
    description = "Creates a list of texts."
    icon = "list"
    name = "CreateList"
    legacy = True

    inputs = [
        StrInput(
            name="texts",
            display_name="Texts",
            info="Enter one or more texts.",
            is_list=True,
        ),
    ]

    outputs = [
        Output(display_name="Data List", name="list", method="create_list"),
    ]

    def create_list(self) -> list[Data]:
        data = [Data(text=text) for text in self.texts]
        self.status = data
        return data
Current Date
The Current Date component returns the current date and time in a selected timezone. This component provides a flexible way to obtain timezone-specific date and time information within a Langflow pipeline.
Parameters
Inputs

Name | Display Name | Info |
---|---|---|
timezone | Timezone | Select the timezone for the current date and time. |

Outputs

Name | Display Name | Info |
---|---|---|
current_date | Current Date | The resulting current date and time in the selected timezone. |
Component code
current_date.py
from datetime import datetime
from zoneinfo import ZoneInfo

from loguru import logger

from langflow.custom import Component
from langflow.io import DropdownInput, Output
from langflow.schema.message import Message


class CurrentDateComponent(Component):
    display_name = "Current Date"
    description = "Returns the current date and time in the selected timezone."
    icon = "clock"
    name = "CurrentDate"

    inputs = [
        DropdownInput(
            name="timezone",
            display_name="Timezone",
            options=[
                "UTC",
                "US/Eastern",
                "US/Central",
                "US/Mountain",
                "US/Pacific",
                "Europe/London",
                "Europe/Paris",
                "Europe/Berlin",
                "Europe/Moscow",
                "Asia/Tokyo",
                "Asia/Shanghai",
                "Asia/Singapore",
                "Asia/Dubai",
                "Australia/Sydney",
                "Australia/Melbourne",
                "Pacific/Auckland",
                "America/Sao_Paulo",
                "America/Mexico_City",
                "America/Toronto",
                "America/Vancouver",
                "Africa/Cairo",
                "Africa/Johannesburg",
                "Atlantic/Reykjavik",
                "Indian/Maldives",
                "America/Bogota",
                "America/Lima",
                "America/Santiago",
                "America/Buenos_Aires",
                "America/Caracas",
                "America/La_Paz",
                "America/Montevideo",
                "America/Asuncion",
                "America/Cuiaba",
            ],
            value="UTC",
            info="Select the timezone for the current date and time.",
            tool_mode=True,
        ),
    ]

    outputs = [
        Output(display_name="Current Date", name="current_date", method="get_current_date"),
    ]

    def get_current_date(self) -> Message:
        try:
            tz = ZoneInfo(self.timezone)
            current_date = datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S %Z")
            result = f"Current date and time in {self.timezone}: {current_date}"
            self.status = result
            return Message(text=result)
        except Exception as e:  # noqa: BLE001
            logger.opt(exception=True).debug("Error getting current date")
            error_message = f"Error: {e}"
            self.status = error_message
            return Message(text=error_message)
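Outside of a flow, the timezone-aware formatting this component performs can be reproduced with the standard library alone. The following is a minimal sketch; the timezone string is just one of the dropdown options shown above:

```python
from datetime import datetime
from zoneinfo import ZoneInfo  # standard library since Python 3.9

# Pick any timezone string from the component's dropdown options.
tz = ZoneInfo("Europe/Paris")

# Same format string the component uses: date, time, and timezone abbreviation.
now = datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S %Z")
print(f"Current date and time in Europe/Paris: {now}")
```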
Custom Component
Use this component as a template to create your custom component.
Component code
custom_component.py
# from langflow.field_typing import Data
from langflow.custom import Component
from langflow.io import MessageTextInput, Output
from langflow.schema import Data


class CustomComponent(Component):
    display_name = "Custom Component"
    description = "Use as a template to create your own component."
    documentation: str = "http://docs.langflow.org/components/custom"
    icon = "code"
    name = "CustomComponent"

    inputs = [
        MessageTextInput(
            name="input_value",
            display_name="Input Value",
            info="This is a custom component Input",
            value="Hello, World!",
            tool_mode=True,
        ),
    ]

    outputs = [
        Output(display_name="Output", name="output", method="build_output"),
    ]

    def build_output(self) -> Data:
        data = Data(value=self.input_value)
        self.status = data
        return data
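As a quick illustration of extending the template, the sketch below renames the component and transforms the input before returning it. The UpperCaseComponent name and behavior are purely illustrative and not part of Langflow:

```python
from langflow.custom import Component
from langflow.io import MessageTextInput, Output
from langflow.schema import Data


class UpperCaseComponent(Component):
    # Hypothetical variant of the template above: upper-cases the incoming text.
    display_name = "Upper Case"
    description = "Returns the input text in upper case."
    icon = "code"
    name = "UpperCase"

    inputs = [
        MessageTextInput(
            name="input_value",
            display_name="Input Value",
            info="Text to convert to upper case.",
            value="Hello, World!",
        ),
    ]

    outputs = [
        Output(display_name="Output", name="output", method="build_output"),
    ]

    def build_output(self) -> Data:
        data = Data(value=self.input_value.upper())
        self.status = data
        return data
```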
Hierarchical Task
This component creates and manages hierarchical tasks for CrewAI agents in a Playground environment.
For more information, see the CrewAI documentation.
Parameters
Inputs

Name | Display Name | Info |
---|---|---|
task_description | Description | Descriptive text detailing the task's purpose and execution. |
expected_output | Expected Output | A clear definition of the expected task outcome. |
tools | Tools | The list of tools or resources the task is limited to. Uses the agent's tools by default. |

Outputs

Name | Display Name | Info |
---|---|---|
task_output | Task | The built hierarchical task. |
Component code
hierarchical_task.py
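The component's source is not included here. For orientation, the sketch below shows how a hierarchical setup is typically expressed directly in CrewAI, assuming the crewai package's Agent, Task, Crew, and Process APIs; the agents, task, and settings are illustrative and this is not the component's implementation:

```python
from crewai import Agent, Crew, Process, Task

# Illustrative agents and task; names and settings are assumptions, not Langflow code.
manager = Agent(role="Manager", goal="Delegate work and review results", backstory="Oversees the crew.")
researcher = Agent(role="Researcher", goal="Gather facts", backstory="Finds supporting information.")

task = Task(
    description="Summarize the latest findings on the topic.",
    expected_output="A short, well-structured summary.",
)

# In a hierarchical process, a manager agent (or manager LLM) delegates tasks to the other agents.
crew = Crew(
    agents=[researcher],
    tasks=[task],
    process=Process.hierarchical,
    manager_agent=manager,
)
# result = crew.kickoff()
```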
ID Generator
This component generates a unique ID.
Parameters
Inputs

Name | Display Name | Info |
---|---|---|
unique_id | Value | The generated unique ID. Use the refresh button to generate a new value. |

Outputs

Name | Display Name | Info |
---|---|---|
id | ID | The generated unique ID as a message. |
Component code
id_generator.py
import uuid
from typing import Any

from typing_extensions import override

from langflow.custom import Component
from langflow.io import MessageTextInput, Output
from langflow.schema import dotdict
from langflow.schema.message import Message


class IDGeneratorComponent(Component):
    display_name = "ID Generator"
    description = "Generates a unique ID."
    icon = "fingerprint"
    name = "IDGenerator"

    inputs = [
        MessageTextInput(
            name="unique_id",
            display_name="Value",
            info="The generated unique ID.",
            refresh_button=True,
            tool_mode=True,
        ),
    ]

    outputs = [
        Output(display_name="ID", name="id", method="generate_id"),
    ]

    @override
    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
        if field_name == "unique_id":
            build_config[field_name]["value"] = str(uuid.uuid4())
        return build_config

    def generate_id(self) -> Message:
        unique_id = self.unique_id or str(uuid.uuid4())
        self.status = f"Generated ID: {unique_id}"
        return Message(text=unique_id)
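The underlying ID generation is plain uuid4 from the standard library. A minimal sketch of the same behavior, including the fallback to a fresh UUID when no value is provided:

```python
import uuid


def generate_id(provided: str | None = None) -> str:
    # Mirrors the component: reuse a provided value, otherwise mint a new UUID4 string.
    return provided or str(uuid.uuid4())


print(generate_id())               # e.g. '7f9c2a3e-...'
print(generate_id("my-fixed-id"))  # 'my-fixed-id'
```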
Message history
This component retrieves and manages chat messages from Langflow tables or an external memory.
This component was named Chat Memory prior to Langflow version 1.1.0.
Parameters
Inputs

Name | Display Name | Info |
---|---|---|
memory | External Memory | Retrieve messages from an external memory. If empty, it uses the Langflow tables. |
sender | Sender Type | Filter by sender type. |
sender_name | Sender Name | Filter by sender name. |
n_messages | Number of Messages | Number of messages to retrieve. |
session_id | Session ID | The session ID of the chat. If empty, the current session ID parameter is used. |
order | Order | Order of the messages. |
template | Template | The template to use for formatting the data. It can contain the keys {text}, {sender}, or any other key in the message data. |

Outputs

Name | Display Name | Info |
---|---|---|
messages | Messages (data object) | Retrieved messages as data objects. |
messages_text | Messages (text) | Retrieved messages formatted as text. |
lc_memory | Memory | The created LangChain-compatible memory object. |
Component code
memory.py
from langflow.custom import Component
from langflow.helpers.data import data_to_text
from langflow.inputs import HandleInput
from langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output
from langflow.memory import aget_messages
from langflow.schema import Data
from langflow.schema.message import Message
from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER


class MemoryComponent(Component):
    display_name = "Message History"
    description = "Retrieves stored chat messages from Langflow tables or an external memory."
    icon = "message-square-more"
    name = "Memory"

    inputs = [
        HandleInput(
            name="memory",
            display_name="External Memory",
            input_types=["Memory"],
            info="Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
        ),
        DropdownInput(
            name="sender",
            display_name="Sender Type",
            options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, "Machine and User"],
            value="Machine and User",
            info="Filter by sender type.",
            advanced=True,
        ),
        MessageTextInput(
            name="sender_name",
            display_name="Sender Name",
            info="Filter by sender name.",
            advanced=True,
        ),
        IntInput(
            name="n_messages",
            display_name="Number of Messages",
            value=100,
            info="Number of messages to retrieve.",
            advanced=True,
        ),
        MessageTextInput(
            name="session_id",
            display_name="Session ID",
            info="The session ID of the chat. If empty, the current session ID parameter will be used.",
            advanced=True,
        ),
        DropdownInput(
            name="order",
            display_name="Order",
            options=["Ascending", "Descending"],
            value="Ascending",
            info="Order of the messages.",
            advanced=True,
            tool_mode=True,
        ),
        MultilineInput(
            name="template",
            display_name="Template",
            info="The template to use for formatting the data. "
            "It can contain the keys {text}, {sender} or any other key in the message data.",
            value="{sender_name}: {text}",
            advanced=True,
        ),
    ]

    outputs = [
        Output(display_name="Data", name="messages", method="retrieve_messages"),
        Output(display_name="Text", name="messages_text", method="retrieve_messages_as_text"),
    ]

    async def retrieve_messages(self) -> Data:
        sender = self.sender
        sender_name = self.sender_name
        session_id = self.session_id
        n_messages = self.n_messages
        order = "DESC" if self.order == "Descending" else "ASC"

        if sender == "Machine and User":
            sender = None

        if self.memory:
            # override session_id
            self.memory.session_id = session_id

            stored = await self.memory.aget_messages()
            # langchain memories are supposed to return messages in ascending order
            if order == "DESC":
                stored = stored[::-1]
            if n_messages:
                stored = stored[:n_messages]
            stored = [Message.from_lc_message(m) for m in stored]
            if sender:
                expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER
                stored = [m for m in stored if m.type == expected_type]
        else:
            stored = await aget_messages(
                sender=sender,
                sender_name=sender_name,
                session_id=session_id,
                limit=n_messages,
                order=order,
            )
        self.status = stored
        return stored

    async def retrieve_messages_as_text(self) -> Message:
        stored_text = data_to_text(self.template, await self.retrieve_messages())
        self.status = stored_text
        return Message(text=stored_text)
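The Template input behaves like a Python format string applied to each retrieved message. The sketch below illustrates that formatting with plain dictionaries; data_to_text is Langflow's helper, and the message contents here are illustrative:

```python
# Illustrative stand-in for what the template does to each message's data.
template = "{sender_name}: {text}"

messages = [
    {"sender_name": "User", "text": "What time is it?", "sender": "User"},
    {"sender_name": "AI", "text": "It is 10:42 UTC.", "sender": "Machine"},
]

formatted = "\n".join(template.format(**m) for m in messages)
print(formatted)
# User: What time is it?
# AI: It is 10:42 UTC.
```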
Sequential Task
This component creates and manages sequential tasks for CrewAI agents. It builds a SequentialTask object with the provided description, expected output, and agent, allowing you to specify tools and asynchronous execution.
For more information, see the CrewAI documentation.
Parameters
Inputs

Name | Display Name | Info |
---|---|---|
task_description | Description | Descriptive text detailing the task's purpose and execution. |
expected_output | Expected Output | A clear definition of the expected task outcome. |
tools | Tools | The list of tools or resources the task is limited to. Uses the agent's tools by default. |
agent | Agent | The CrewAI Agent that will perform the task. |
task | Task | The CrewAI Task that will perform the task. |
async_execution | Async Execution | Boolean flag indicating asynchronous task execution. |

Outputs

Name | Display Name | Info |
---|---|---|
task_output | Task | The built sequential task or list of tasks. |
Component code
sequential_task.py
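As with the hierarchical variant, the component's source is not included here. The sketch below shows the equivalent construction directly in CrewAI, assuming the crewai package's Agent and Task APIs; the agent, task text, and settings are illustrative:

```python
from crewai import Agent, Task

# An illustrative agent; in the component this arrives through the "agent" input.
writer = Agent(role="Writer", goal="Produce clear summaries", backstory="Writes concise reports.")

# A sequential task mirroring the component's inputs: description, expected output,
# the agent that performs it, and the async execution flag.
summary_task = Task(
    description="Summarize the research notes into three bullet points.",
    expected_output="Three concise bullet points.",
    agent=writer,
    async_execution=False,
)
```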
Store Message
This component stores chat messages or text into Langflow tables or an external memory.
It provides flexibility in managing message storage and retrieval within a chat system.
Parameters
Inputs

Name | Display Name | Info |
---|---|---|
message | Message | The chat message to be stored. (Required) |
memory | External Memory | The external memory to store the message. If empty, it will use the Langflow tables. |
sender | Sender | The sender of the message. Can be Machine or User. If empty, the current sender parameter will be used. |
sender_name | Sender Name | The name of the sender. Can be AI or User. If empty, the current sender parameter will be used. |
session_id | Session ID | The session ID of the chat. If empty, the current session ID parameter will be used. |

Outputs

Name | Display Name | Info |
---|---|---|
stored_messages | Stored Messages | The list of stored messages after the current message has been added. |
Component code
store_message.py
from langflow.custom import Component
from langflow.inputs import HandleInput
from langflow.inputs.inputs import MessageTextInput
from langflow.memory import aget_messages, astore_message
from langflow.schema.message import Message
from langflow.template import Output
from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI


class MessageStoreComponent(Component):
    display_name = "Message Store"
    description = "Stores a chat message or text into Langflow tables or an external memory."
    icon = "message-square-text"
    name = "StoreMessage"

    inputs = [
        MessageTextInput(
            name="message", display_name="Message", info="The chat message to be stored.", required=True, tool_mode=True
        ),
        HandleInput(
            name="memory",
            display_name="External Memory",
            input_types=["Memory"],
            info="The external memory to store the message. If empty, it will use the Langflow tables.",
        ),
        MessageTextInput(
            name="sender",
            display_name="Sender",
            info="The sender of the message. Might be Machine or User. "
            "If empty, the current sender parameter will be used.",
            advanced=True,
        ),
        MessageTextInput(
            name="sender_name",
            display_name="Sender Name",
            info="The name of the sender. Might be AI or User. If empty, the current sender parameter will be used.",
            advanced=True,
        ),
        MessageTextInput(
            name="session_id",
            display_name="Session ID",
            info="The session ID of the chat. If empty, the current session ID parameter will be used.",
            value="",
            advanced=True,
        ),
    ]

    outputs = [
        Output(display_name="Stored Messages", name="stored_messages", method="store_message", hidden=True),
    ]

    async def store_message(self) -> Message:
        message = Message(text=self.message) if isinstance(self.message, str) else self.message

        message.session_id = self.session_id or message.session_id
        message.sender = self.sender or message.sender or MESSAGE_SENDER_AI
        message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI

        if self.memory:
            # override session_id
            self.memory.session_id = message.session_id
            lc_message = message.to_lc_message()
            await self.memory.aadd_messages([lc_message])
            stored_message = await self.memory.aget_messages()
            stored_message = [Message.from_lc_message(m) for m in stored_message]
            if message.sender:
                stored_message = [m for m in stored_message if m.sender == message.sender]
        else:
            await astore_message(message, flow_id=self.graph.flow_id)
            stored_messages = await aget_messages(
                session_id=message.session_id, sender_name=message.sender_name, sender=message.sender
            )
            if not stored_messages:
                msg = "No messages were stored. Please ensure that the session ID and sender are properly set."
                raise ValueError(msg)
            stored_message = stored_messages[0]
        self.status = stored_message
        return stored_message
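When an external memory is connected, the component only needs an object that exposes LangChain's chat-history interface (aadd_messages and aget_messages). A minimal sketch using langchain_core's in-memory history; this is an assumption about your setup, and any BaseChatMessageHistory implementation works the same way:

```python
import asyncio

from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.messages import HumanMessage


async def main() -> None:
    # Stands in for the "External Memory" handle on the component.
    memory = InMemoryChatMessageHistory()

    # Store a message, then read everything back, as the component does.
    await memory.aadd_messages([HumanMessage(content="Remember this.")])
    stored = await memory.aget_messages()
    print([m.content for m in stored])  # ['Remember this.']


asyncio.run(main())
```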
Structured Output
The Structured Output component transforms LLM responses into structured data formats.
Parameters
Inputs

Name | Display Name | Info |
---|---|---|
llm | Language Model | The language model to use to generate the structured output. |
input_value | Input Message | The input message to be processed by the language model. |
schema_name | Schema Name | Provide a name for the output data schema. |
output_schema | Output Schema | Define the structure and data types for the model's output. |
multiple | Generate Multiple | Set to True if the model should generate a list of outputs instead of a single output. |

Outputs

Name | Display Name | Info |
---|---|---|
structured_output | Structured Output | The resulting structured output based on the defined schema. |
Component code
structured_output.py
from typing import TYPE_CHECKING, cast

from pydantic import BaseModel, Field, create_model

from langflow.base.models.chat_result import get_chat_result
from langflow.custom import Component
from langflow.helpers.base_model import build_model_from_schema
from langflow.io import BoolInput, HandleInput, MessageTextInput, Output, StrInput, TableInput
from langflow.schema.data import Data

if TYPE_CHECKING:
    from langflow.field_typing.constants import LanguageModel


class StructuredOutputComponent(Component):
    display_name = "Structured Output"
    description = (
        "Transforms LLM responses into **structured data formats**. Ideal for extracting specific information "
        "or creating consistent outputs."
    )
    name = "StructuredOutput"
    icon = "braces"

    inputs = [
        HandleInput(
            name="llm",
            display_name="Language Model",
            info="The language model to use to generate the structured output.",
            input_types=["LanguageModel"],
            required=True,
        ),
        MessageTextInput(
            name="input_value",
            display_name="Input Message",
            info="The input message to the language model.",
            tool_mode=True,
        ),
        StrInput(
            name="schema_name",
            display_name="Schema Name",
            info="Provide a name for the output data schema.",
            advanced=True,
        ),
        TableInput(
            name="output_schema",
            display_name="Output Schema",
            info="Define the structure and data types for the model's output.",
            required=True,
            table_schema=[
                {
                    "name": "name",
                    "display_name": "Name",
                    "type": "str",
                    "description": "Specify the name of the output field.",
                    "default": "field",
                },
                {
                    "name": "description",
                    "display_name": "Description",
                    "type": "str",
                    "description": "Describe the purpose of the output field.",
                    "default": "description of field",
                },
                {
                    "name": "type",
                    "display_name": "Type",
                    "type": "str",
                    "description": (
                        "Indicate the data type of the output field (e.g., str, int, float, bool, list, dict)."
                    ),
                    "default": "text",
                },
                {
                    "name": "multiple",
                    "display_name": "Multiple",
                    "type": "boolean",
                    "description": "Set to True if this output field should be a list of the specified type.",
                    "default": "False",
                },
            ],
            value=[{"name": "field", "description": "description of field", "type": "text", "multiple": "False"}],
        ),
        BoolInput(
            name="multiple",
            advanced=True,
            display_name="Generate Multiple",
            info="Set to True if the model should generate a list of outputs instead of a single output.",
        ),
    ]

    outputs = [
        Output(name="structured_output", display_name="Structured Output", method="build_structured_output"),
    ]

    def build_structured_output(self) -> Data:
        if not hasattr(self.llm, "with_structured_output"):
            msg = "Language model does not support structured output."
            raise TypeError(msg)
        if not self.output_schema:
            msg = "Output schema cannot be empty"
            raise ValueError(msg)

        output_model_ = build_model_from_schema(self.output_schema)
        if self.multiple:
            output_model = create_model(
                self.schema_name,
                objects=(list[output_model_], Field(description=f"A list of {self.schema_name}.")),  # type: ignore[valid-type]
            )
        else:
            output_model = output_model_

        try:
            llm_with_structured_output = cast("LanguageModel", self.llm).with_structured_output(schema=output_model)  # type: ignore[valid-type, attr-defined]
        except NotImplementedError as exc:
            msg = f"{self.llm.__class__.__name__} does not support structured output."
            raise TypeError(msg) from exc

        config_dict = {
            "run_name": self.display_name,
            "project_name": self.get_project_name(),
            "callbacks": self.get_langchain_callbacks(),
        }
        output = get_chat_result(runnable=llm_with_structured_output, input_value=self.input_value, config=config_dict)
        if isinstance(output, BaseModel):
            output_dict = output.model_dump()
        else:
            msg = f"Output should be a Pydantic BaseModel, got {type(output)} ({output})"
            raise TypeError(msg)
        return Data(data=output_dict)
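Conceptually, the component builds a Pydantic model from the Output Schema table and asks the model to fill it via LangChain's with_structured_output. The following is a minimal sketch of that pattern outside Langflow, assuming langchain-openai's ChatOpenAI; the Person schema and model name are illustrative, and any chat model that implements with_structured_output behaves the same way:

```python
from pydantic import BaseModel, Field

from langchain_openai import ChatOpenAI  # assumption: any chat model with with_structured_output works


class Person(BaseModel):
    # Equivalent of an "Output Schema" table with two rows: field names, types, and descriptions.
    name: str = Field(description="The person's full name.")
    age: int = Field(description="The person's age in years.")


llm = ChatOpenAI(model="gpt-4o-mini")
structured_llm = llm.with_structured_output(Person)

result = structured_llm.invoke("Ada Lovelace was 36 years old when she died.")
print(result)  # Person(name='Ada Lovelace', age=36)
```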