diff --git a/src/backend/base/langflow/base/models/model.py b/src/backend/base/langflow/base/models/model.py
index a8069dcf9..eb2f4b36c 100644
--- a/src/backend/base/langflow/base/models/model.py
+++ b/src/backend/base/langflow/base/models/model.py
@@ -7,6 +7,7 @@ from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 
 from langflow.custom import Component
 from langflow.field_typing.prompt import Prompt
+from langflow.schema.message import Message
 
 
 class LCModelComponent(Component):
@@ -92,7 +93,7 @@ class LCModelComponent(Component):
         if system_message:
             messages.append(SystemMessage(content=system_message))
         if input_value:
-            if isinstance(input_value, Prompt):
+            if isinstance(input_value, Message):
                 with warnings.catch_warnings():
                     warnings.simplefilter("ignore")
                     if "prompt" in input_value:
diff --git a/src/backend/base/langflow/graph/utils.py b/src/backend/base/langflow/graph/utils.py
index 6d1dca0cb..7183b923f 100644
--- a/src/backend/base/langflow/graph/utils.py
+++ b/src/backend/base/langflow/graph/utils.py
@@ -54,6 +54,7 @@ def flatten_list(list_of_lists: list[Union[list, Any]]) -> list:
 def serialize_field(value):
     """Unified serialization function for handling both BaseModel and Document types,
     including handling lists of these types."""
+
     if isinstance(value, (list, tuple)):
         return [serialize_field(v) for v in value]
     elif isinstance(value, Document):