diff --git a/docs/docs/Configuration/environment-variables.md b/docs/docs/Configuration/environment-variables.md
index 832e44e68..389034a51 100644
--- a/docs/docs/Configuration/environment-variables.md
+++ b/docs/docs/Configuration/environment-variables.md
@@ -75,6 +75,8 @@ If it detects a supported environment variable, then it automatically adopts the
LANGFLOW_HOST=localhost
LANGFLOW_LANGCHAIN_CACHE=InMemoryCache
LANGFLOW_MAX_FILE_SIZE_UPLOAD=10000
+ LANGFLOW_MAX_ITEMS_LENGTH=100
+ LANGFLOW_MAX_TEXT_LENGTH=1000
LANGFLOW_LOG_LEVEL=error
LANGFLOW_OPEN_BROWSER=false
LANGFLOW_PORT=7860
@@ -202,6 +204,8 @@ The following table lists the environment variables supported by Langflow.
| LANGFLOW_LOG_FILE | String | Not set | Path to the log file. If this option is not set, logs are written to stdout. |
| LANGFLOW_LOG_RETRIEVER_BUFFER_SIZE | Integer | `10000` | Set the buffer size for log retrieval. Only used if `LANGFLOW_ENABLE_LOG_RETRIEVAL` is enabled. |
| LANGFLOW_MAX_FILE_SIZE_UPLOAD | Integer | `100` | Set the maximum file size for the upload in megabytes. See [`--max-file-size-upload` option](./configuration-cli.md#run-max-file-size-upload). |
+| LANGFLOW_MAX_ITEMS_LENGTH | Integer | `100` | Maximum number of items to store and display in the UI. Lists longer than this will be truncated when displayed in the UI. Does not affect data passed between components or outputs. |
+| LANGFLOW_MAX_TEXT_LENGTH | Integer | `1000` | Maximum number of characters to store and display in the UI. Responses longer than this will be truncated when displayed in the UI. Does not truncate responses passed between components or outputs. |
| LANGFLOW_MCP_SERVER_ENABLED | Boolean | `true` | If this option is set to False, Langflow does not enable the MCP server. |
| LANGFLOW_MCP_SERVER_ENABLE_PROGRESS_NOTIFICATIONS | Boolean | `false` | If this option is set to True, Langflow sends progress notifications in the MCP server. |
| LANGFLOW_NEW_USER_IS_ACTIVE | Boolean | `false` | When enabled, new users are automatically activated and can log in without requiring explicit activation by the superuser. |
@@ -259,6 +263,8 @@ LANGFLOW_HEALTH_CHECK_MAX_RETRIES=5
LANGFLOW_HOST=localhost
LANGFLOW_LANGCHAIN_CACHE=InMemoryCache
LANGFLOW_MAX_FILE_SIZE_UPLOAD=10000
+LANGFLOW_MAX_ITEMS_LENGTH=100
+LANGFLOW_MAX_TEXT_LENGTH=1000
LANGFLOW_LOG_LEVEL=error
LANGFLOW_OPEN_BROWSER=false
LANGFLOW_PORT=7860
@@ -298,6 +304,8 @@ Environment="LANGFLOW_HEALTH_CHECK_MAX_RETRIES=5"
Environment="LANGFLOW_HOST=localhost"
Environment="LANGFLOW_LANGCHAIN_CACHE=InMemoryCache"
Environment="LANGFLOW_MAX_FILE_SIZE_UPLOAD=10000"
+Environment="LANGFLOW_MAX_ITEMS_LENGTH=100"
+Environment="LANGFLOW_MAX_TEXT_LENGTH=1000"
Environment="LANGFLOW_LOG_ENV=container_json"
Environment="LANGFLOW_LOG_FILE=logs/langflow.log"
Environment="LANGFLOW_LOG_LEVEL=error"
@@ -344,6 +352,8 @@ Create or edit the `.vscode/tasks.json` file in your project root:
"LANGFLOW_HOST": "localhost",
"LANGFLOW_LANGCHAIN_CACHE": "InMemoryCache",
"LANGFLOW_MAX_FILE_SIZE_UPLOAD": "10000",
+ "LANGFLOW_MAX_ITEMS_LENGTH": "100",
+ "LANGFLOW_MAX_TEXT_LENGTH": "1000",
"LANGFLOW_LOG_ENV": "container_csv",
"LANGFLOW_LOG_FILE": "langflow.log",
"LANGFLOW_LOG_LEVEL": "error",
diff --git a/src/backend/base/langflow/api/v1/schemas.py b/src/backend/base/langflow/api/v1/schemas.py
index 1bef87e9d..2b6b3a0eb 100644
--- a/src/backend/base/langflow/api/v1/schemas.py
+++ b/src/backend/base/langflow/api/v1/schemas.py
@@ -18,8 +18,7 @@ from langflow.schema.dotdict import dotdict
from langflow.schema.graph import Tweaks
from langflow.schema.schema import InputType, OutputType, OutputValue
from langflow.serialization import constants as serialization_constants
-from langflow.serialization.constants import MAX_ITEMS_LENGTH, MAX_TEXT_LENGTH
-from langflow.serialization.serialization import serialize
+from langflow.serialization.serialization import get_max_items_length, get_max_text_length, serialize
from langflow.services.database.models.api_key.model import ApiKeyRead
from langflow.services.database.models.base import orjson_dumps
from langflow.services.database.models.flow.model import FlowCreate, FlowRead
@@ -276,18 +275,28 @@ class ResultDataResponse(BaseModel):
@field_serializer("results")
@classmethod
def serialize_results(cls, v):
- """Serialize results with custom handling for special types and truncation."""
- return serialize(v, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH)
+ """Serializes the results value with custom handling for special types and applies truncation limits.
+
+ Returns:
+ The serialized representation of the input value, truncated according to configured
+ maximum text length and item count.
+ """
+ return serialize(v, max_length=get_max_text_length(), max_items=get_max_items_length())
@model_serializer(mode="plain")
def serialize_model(self) -> dict:
- """Custom serializer for the entire model."""
+ """Serialize the entire model into a dictionary with truncation applied to large fields.
+
+ Returns:
+ dict: A dictionary representation of the model with serialized and truncated
+ results, outputs, logs, message, and artifacts.
+ """
return {
"results": self.serialize_results(self.results),
- "outputs": serialize(self.outputs, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH),
- "logs": serialize(self.logs, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH),
- "message": serialize(self.message, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH),
- "artifacts": serialize(self.artifacts, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH),
+ "outputs": serialize(self.outputs, max_length=get_max_text_length(), max_items=get_max_items_length()),
+ "logs": serialize(self.logs, max_length=get_max_text_length(), max_items=get_max_items_length()),
+ "message": serialize(self.message, max_length=get_max_text_length(), max_items=get_max_items_length()),
+ "artifacts": serialize(self.artifacts, max_length=get_max_text_length(), max_items=get_max_items_length()),
"timedelta": self.timedelta,
"duration": self.duration,
"used_frozen_result": self.used_frozen_result,
@@ -309,7 +318,15 @@ class VertexBuildResponse(BaseModel):
@field_serializer("data")
def serialize_data(self, data: ResultDataResponse) -> dict:
- return serialize(data, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH)
+ """Serialize a ResultDataResponse object into a dictionary with enforced maximum text and item lengths.
+
+ Parameters:
+ data (ResultDataResponse): The data object to serialize.
+
+ Returns:
+ dict: The serialized representation of the data with truncation applied.
+ """
+ return serialize(data, max_length=get_max_text_length(), max_items=get_max_items_length())
class VerticesBuiltResponse(BaseModel):
diff --git a/src/backend/base/langflow/graph/utils.py b/src/backend/base/langflow/graph/utils.py
index 4689c116a..e35d6faed 100644
--- a/src/backend/base/langflow/graph/utils.py
+++ b/src/backend/base/langflow/graph/utils.py
@@ -11,8 +11,7 @@ from loguru import logger
from langflow.interface.utils import extract_input_variables_from_prompt
from langflow.schema.data import Data
from langflow.schema.message import Message
-from langflow.serialization.constants import MAX_ITEMS_LENGTH, MAX_TEXT_LENGTH
-from langflow.serialization.serialization import serialize
+from langflow.serialization.serialization import get_max_items_length, get_max_text_length, serialize
from langflow.services.database.models.transactions.crud import log_transaction as crud_log_transaction
from langflow.services.database.models.transactions.model import TransactionBase
from langflow.services.database.models.vertex_builds.crud import log_vertex_build as crud_log_vertex_build
@@ -116,6 +115,13 @@ def _vertex_to_primitive_dict(target: Vertex) -> dict:
async def log_transaction(
flow_id: str | UUID, source: Vertex, status, target: Vertex | None = None, error=None
) -> None:
+ """Asynchronously logs a transaction record for a vertex in a flow if transaction storage is enabled.
+
+ Serializes the source vertex's primitive parameters and result, handling pandas DataFrames as needed,
+ and records transaction details including inputs, outputs, status, error, and flow ID in the database.
+ If the flow ID is not provided, attempts to retrieve it from the source vertex's graph.
+ Logs warnings and errors on serialization or database failures.
+ """
try:
if not get_settings_service().settings.transactions_storage_enabled:
return
@@ -143,8 +149,8 @@ async def log_transaction(
transaction = TransactionBase(
vertex_id=source.id,
target_id=target.id if target else None,
- inputs=serialize(inputs, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH),
- outputs=serialize(outputs, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH),
+ inputs=serialize(inputs, max_length=get_max_text_length(), max_items=get_max_items_length()),
+ outputs=serialize(outputs, max_length=get_max_text_length(), max_items=get_max_items_length()),
status=status,
error=error,
flow_id=flow_id if isinstance(flow_id, UUID) else UUID(flow_id),
@@ -167,17 +173,22 @@ async def log_vertex_build(
data: ResultDataResponse | dict,
artifacts: dict | None = None,
) -> None:
+ """Asynchronously logs a vertex build record to the database if vertex build storage is enabled.
+
+ Serializes the provided data and artifacts with configurable length and item limits before storing.
+ Converts parameters to string if present. Handles exceptions by logging errors.
+ """
try:
if not get_settings_service().settings.vertex_builds_storage_enabled:
return
vertex_build = VertexBuildBase(
- flow_id=flow_id,
+ flow_id=flow_id if isinstance(flow_id, UUID) else UUID(flow_id),
id=vertex_id,
valid=valid,
params=str(params) if params else None,
- data=serialize(data, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH),
- artifacts=serialize(artifacts, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH),
+ data=serialize(data, max_length=get_max_text_length(), max_items=get_max_items_length()),
+ artifacts=serialize(artifacts, max_length=get_max_text_length(), max_items=get_max_items_length()),
)
async with session_getter(get_db_service()) as session:
inserted = await crud_log_vertex_build(session, vertex_build)
diff --git a/src/backend/base/langflow/serialization/serialization.py b/src/backend/base/langflow/serialization/serialization.py
index 276b38d25..cc53ce6da 100644
--- a/src/backend/base/langflow/serialization/serialization.py
+++ b/src/backend/base/langflow/serialization/serialization.py
@@ -1,6 +1,7 @@
from collections.abc import AsyncIterator, Generator, Iterator
from datetime import datetime, timezone
from decimal import Decimal
+from functools import lru_cache
from typing import Any, cast
from uuid import UUID
@@ -12,6 +13,7 @@ from pydantic import BaseModel
from pydantic.v1 import BaseModel as BaseModelV1
from langflow.serialization.constants import MAX_ITEMS_LENGTH, MAX_TEXT_LENGTH
+from langflow.services.deps import get_settings_service
# Sentinel variable to signal a failed serialization.
@@ -25,8 +27,28 @@ class _UnserializableSentinel:
UNSERIALIZABLE_SENTINEL = _UnserializableSentinel()
+@lru_cache(maxsize=1)
+def get_max_text_length() -> int:
+ """Return the maximum allowed text length for serialization from the current settings."""
+ return get_settings_service().settings.max_text_length
+
+
+@lru_cache(maxsize=1)
+def get_max_items_length() -> int:
+ """Return the maximum allowed number of items for serialization, as defined in the current settings."""
+ return get_settings_service().settings.max_items_length
+
+
def _serialize_str(obj: str, max_length: int | None, _) -> str:
- """Truncate long strings with ellipsis if max_length provided."""
+ """Truncates a string to the specified maximum length, appending an ellipsis if truncation occurs.
+
+ Parameters:
+ obj (str): The string to be truncated.
+ max_length (int | None): The maximum allowed length of the string. If None, no truncation is performed.
+
+ Returns:
+ str: The original or truncated string, with an ellipsis appended if truncated.
+ """
if max_length is None or len(obj) <= max_length:
return obj
return obj[:max_length] + "..."
diff --git a/src/backend/base/langflow/services/database/models/transactions/model.py b/src/backend/base/langflow/services/database/models/transactions/model.py
index 627bf7101..333da5108 100644
--- a/src/backend/base/langflow/services/database/models/transactions/model.py
+++ b/src/backend/base/langflow/services/database/models/transactions/model.py
@@ -4,8 +4,7 @@ from uuid import UUID, uuid4
from pydantic import field_serializer, field_validator
from sqlmodel import JSON, Column, Field, SQLModel
-from langflow.serialization.constants import MAX_ITEMS_LENGTH, MAX_TEXT_LENGTH
-from langflow.serialization.serialization import serialize
+from langflow.serialization.serialization import get_max_items_length, get_max_text_length, serialize
class TransactionBase(SQLModel):
@@ -33,11 +32,27 @@ class TransactionBase(SQLModel):
@field_serializer("inputs")
def serialize_inputs(self, data) -> dict:
- return serialize(data, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH)
+ """Serialize the transaction's input data with enforced limits on text length and item count.
+
+ Parameters:
+ data (dict): The input data to be serialized.
+
+ Returns:
+ dict: The serialized input data with applied constraints.
+ """
+ return serialize(data, max_length=get_max_text_length(), max_items=get_max_items_length())
@field_serializer("outputs")
def serialize_outputs(self, data) -> dict:
- return serialize(data, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH)
+ """Serialize the outputs dictionary with enforced limits on text length and item count.
+
+ Parameters:
+ data (dict): The outputs data to serialize.
+
+ Returns:
+ dict: The serialized outputs dictionary with applied constraints.
+ """
+ return serialize(data, max_length=get_max_text_length(), max_items=get_max_items_length())
class TransactionTable(TransactionBase, table=True): # type: ignore[call-arg]
diff --git a/src/backend/base/langflow/services/database/models/vertex_builds/model.py b/src/backend/base/langflow/services/database/models/vertex_builds/model.py
index 411438099..53f4e4c5d 100644
--- a/src/backend/base/langflow/services/database/models/vertex_builds/model.py
+++ b/src/backend/base/langflow/services/database/models/vertex_builds/model.py
@@ -5,8 +5,7 @@ from pydantic import BaseModel, field_serializer, field_validator
from sqlalchemy import Text
from sqlmodel import JSON, Column, Field, SQLModel
-from langflow.serialization.constants import MAX_ITEMS_LENGTH, MAX_TEXT_LENGTH
-from langflow.serialization.serialization import serialize
+from langflow.serialization.serialization import get_max_items_length, get_max_text_length, serialize
class VertexBuildBase(SQLModel):
@@ -40,15 +39,30 @@ class VertexBuildBase(SQLModel):
@field_serializer("data")
def serialize_data(self, data) -> dict:
- return serialize(data, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH)
+ """Serializes the `data` field with enforced limits on text length and item count.
+
+ Returns:
+ dict: The serialized representation of the data field.
+ """
+ return serialize(data, max_length=get_max_text_length(), max_items=get_max_items_length())
@field_serializer("artifacts")
def serialize_artifacts(self, data) -> dict:
- return serialize(data, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH)
+ """Serializes the artifacts field, applying limits on text length and item count.
+
+ Returns:
+ dict: The serialized artifacts dictionary with enforced size constraints.
+ """
+ return serialize(data, max_length=get_max_text_length(), max_items=get_max_items_length())
@field_serializer("params")
def serialize_params(self, data) -> str:
- return serialize(data, max_length=MAX_TEXT_LENGTH, max_items=MAX_ITEMS_LENGTH)
+ """Serialize the `params` field to a string with enforced limits on text length and item count.
+
+ Returns:
+ str: The serialized representation of the `params` data.
+ """
+ return serialize(data, max_length=get_max_text_length(), max_items=get_max_items_length())
class VertexBuildTable(VertexBuildBase, table=True): # type: ignore[call-arg]
diff --git a/src/backend/base/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py
index 88e41f8ef..d3d4b5715 100644
--- a/src/backend/base/langflow/services/settings/base.py
+++ b/src/backend/base/langflow/services/settings/base.py
@@ -20,6 +20,7 @@ from pydantic_settings import (
)
from typing_extensions import override
+from langflow.serialization.constants import MAX_ITEMS_LENGTH, MAX_TEXT_LENGTH
from langflow.services.settings.constants import VARIABLES_TO_GET_FROM_ENVIRONMENT
from langflow.utils.util_strings import is_valid_database_url
@@ -229,6 +230,12 @@ class Settings(BaseSettings):
"""Path to the SSL certificate file on the local system."""
ssl_key_file: str | None = None
"""Path to the SSL key file on the local system."""
+ max_text_length: int = MAX_TEXT_LENGTH
+ """Maximum number of characters to store and display in the UI. Responses longer than this
+    will be truncated when displayed in the UI. Does not truncate responses between components or outputs."""
+ max_items_length: int = MAX_ITEMS_LENGTH
+ """Maximum number of items to store and display in the UI. Lists longer than this
+    will be truncated when displayed in the UI. Does not affect data passed between components or outputs."""
# MCP Server
mcp_server_enabled: bool = True