feat: import Graph without position information (#3203)

* refactor: Add utility functions for getting handle IDs in CustomNodes

- Added `getRightHandleId` function to generate the right handle ID for source handles.
- Added `getLeftHandleId` function to generate the left handle ID for target handles.
- These functions improve code readability and maintainability by encapsulating the logic for generating handle IDs.

* refactor: Add type for escaped handle IDs in edges to improve type safety in reactflowUtils

* feat: Add function to escape handle IDs in edges, enhancing edge management in reactflowUtils

* feat: Add function to check edges without escaped handle IDs, improving edge validation in reactflowUtils

* feat: Enhance edge processing in reactflowUtils to handle edges without escaped handle IDs more effectively

* feat: Add layoutUtils module for handling node layout using elkjs

* feat: update processDataFromFlow to add layout to nodes if needed

* Refactor import paths to use 'initialize' module in 'base.py'

* feat: Add method to set class source code and integrate it with frontend node input field

* refactor: Update sourceHandle dataType to use custom component class name

* fix: Raise error for unknown vertex types instead of returning default Vertex class

* refactor: Remove redundant call to _import_vertex_types() in VertexTypesDict initialization

* refactor: Simplify add_code_field by removing unnecessary field_config parameter from function signature

* feat: Add elkjs dependency to package.json and package-lock.json for enhanced functionality in the frontend

* refactor: Update fields type in Template class to use InputTypes for improved type safety

* refactor: Reorganize imports in __init__.py for better structure and consistency across the inputs module

* refactor: Clean up imports in types.py for better organization and consistency in the graph vertex module

* refactor: Change vertex type annotations to strings for better compatibility and consistency in the graph module methods

* refactor: Update component instantiation to include _code parameter and fix input type annotations for improved type handling

* refactor: Remove unused CustomComponent import from __init__.py for cleaner module structure and improved organization

* refactor: Modify custom_component instantiation to include _code argument for enhanced functionality and clarity in CodeParser class

* refactor: Update CustomComponent import in __init__.py for improved module structure and organization

* refactor: Update launch.json to include correct path for backend source files

* refactor: Update dependencies in poetry.lock to latest versions and resolve merge conflicts in backend files

* refactor: Update dataType assignment in Component class to use component name if available, or fallback to class name

* refactor: Correct flow_id reference in MemoryComponent to improve clarity and consistency in memory handling

* refactor: Update import path for DefaultPromptField to improve code organization and maintainability in api_utils.py

* refactor: Add loading module to __init__.py for improved organization of interface package

* refactor: Clean up imports in base.py and enforce edge validation in Graph class for improved maintainability and error handling

* refactor: Remove edge component additions in test_base.py to streamline graph tests and emphasize error handling for unprepared graphs

* refactor: Mark @clack/prompts is-unicode-supported as extraneous in package-lock.json for better dependency management

* refactor: Update dataType assignment in Component class to use component name if available, or fallback to class name

* refactor: Fix edge existence check in Graph class to use correct variable, ensuring accurate validation of graph structure

* refactor: Add test for graph with edge and improve graph preparation logic

* refactor: Set default node type to "genericNode" in getLayoutedNodes for consistent layout structure

* create consts for node width and height

* refactor: Catch and log errors when processing flow data in flowsManagerStore and reactflowUtils

* [autofix.ci] apply automated fixes

* fix: Validate custom components for source and target vertices in Graph edges

* test: Add fixture for client and raise TypeError for invalid class parsing in CodeParser tests

* test: Add unit test for listing flows as Flow objects in custom component with client

* test: Update assertions for memory chatbot component types in unit tests

* test: Refactor assertions to use updated component names in vector store RAG unit tests

* fix: Change error handling to return default Vertex for unknown node types in graph class

* [autofix.ci] apply automated fixes

* test: Add pytest fixture for CustomComponent in unit tests to enhance test structure and readability

* chore: Update component names in vector store RAG unit tests

* test: Refactor imports and make flow name generation unique in database unit tests

* chore: Remove unused upload and flow management functions from flowsManagerStore for cleaner codebase

* chore: Await processDataFromFlow in useAddFlow hook

* chore: Correct NODE_HEIGHT calculation to use NODE_WIDTH constant for consistency in constants file

* chore: Remove extraneous flag for is-unicode-supported in package-lock.json for cleaner dependency management

---------

Co-authored-by: anovazzi1 <otavio2204@gmail.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-08-07 11:04:48 -03:00 committed by GitHub
commit 919073a0db
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 364 additions and 106 deletions

3
.vscode/launch.json vendored
View file

@ -1,7 +1,6 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Debug Backend",
"type": "debugpy",
@ -18,7 +17,7 @@
"--loop",
"asyncio",
"--reload-include",
"src/backend/*"
"./src/backend/*"
],
"jinja": true,
"justMyCode": false,

8
poetry.lock generated
View file

@ -4745,19 +4745,19 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
[[package]]
name = "langchain-anthropic"
version = "0.1.20"
version = "0.1.21"
description = "An integration package connecting AnthropicMessages and LangChain"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_anthropic-0.1.20-py3-none-any.whl", hash = "sha256:3a0d89ac6856be98beb3ec63813393bf29af3c5134247979c055938e741b7d9d"},
{file = "langchain_anthropic-0.1.20.tar.gz", hash = "sha256:cb9607fecfc0f0de49b79dd0fc066790e2877873ef753abd98d2ae38d6e0f5b2"},
{file = "langchain_anthropic-0.1.21-py3-none-any.whl", hash = "sha256:74094162e0badd9f5d275a1e2c4019303fbb45638091c202dcaab5f88fda97aa"},
{file = "langchain_anthropic-0.1.21.tar.gz", hash = "sha256:04131e024f79c6a60837a4f3b1399b90440da7a9b0c984b92704e66c317f6c5b"},
]
[package.dependencies]
anthropic = ">=0.28.0,<1"
defusedxml = ">=0.7.1,<0.8.0"
langchain-core = ">=0.2.17,<0.3"
langchain-core = ">=0.2.24,<0.3"
[[package]]
name = "langchain-astradb"

View file

@ -1,13 +1,13 @@
from langchain.memory import ConversationBufferMemory
from langflow.custom import Component
from langflow.field_typing import BaseChatMemory
from langflow.helpers.data import data_to_text
from langflow.inputs import HandleInput
from langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output
from langflow.memory import get_messages, LCBuiltinChatMemory
from langflow.memory import LCBuiltinChatMemory, get_messages
from langflow.schema import Data
from langflow.schema.message import Message
from langflow.field_typing import BaseChatMemory
from langchain.memory import ConversationBufferMemory
from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER
@ -118,5 +118,5 @@ class MemoryComponent(Component):
if self.memory:
chat_memory = self.memory
else:
chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)
chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)
return ConversationBufferMemory(chat_memory=chat_memory)

View file

@ -1,4 +1,4 @@
from langflow.custom.custom_component import CustomComponent
from langflow.custom.custom_component.component import Component
from langflow.custom.custom_component.custom_component import CustomComponent
__all__ = ["CustomComponent", "Component"]

View file

@ -384,7 +384,7 @@ class CodeParser:
def execute_and_inspect_classes(self, code: str):
custom_component_class = eval_custom_component_code(code)
custom_component = custom_component_class()
custom_component = custom_component_class(_code=code)
dunder_class = custom_component.__class__
# Get the base classes at two levels of inheritance
bases = []

View file

@ -1,3 +0,0 @@
from .custom_component import CustomComponent
__all__ = ["CustomComponent"]

View file

@ -97,7 +97,7 @@ class BaseComponent:
return {}
cc_class = eval_custom_component_code(self._code)
component_instance = cc_class()
component_instance = cc_class(_code=self._code)
template_config = self.get_template_config(component_instance)
return template_config

View file

@ -6,9 +6,7 @@ import nanoid # type: ignore
import yaml
from pydantic import BaseModel
from langflow.graph.edge.schema import EdgeData
from langflow.helpers.custom import format_type
from langflow.inputs.inputs import InputTypes
from langflow.schema.artifact import get_artifact_type, post_process_raw
from langflow.schema.data import Data
from langflow.schema.message import Message
@ -20,13 +18,15 @@ from langflow.utils.async_helpers import run_until_complete
from .custom_component import CustomComponent
if TYPE_CHECKING:
from langflow.graph.edge.schema import EdgeData
from langflow.graph.vertex.base import Vertex
from langflow.inputs.inputs import InputTypes
BACKWARDS_COMPATIBLE_ATTRIBUTES = ["user_id", "vertex", "tracing_service"]
class Component(CustomComponent):
inputs: List[InputTypes] = []
inputs: List["InputTypes"] = []
outputs: List[Output] = []
code_class_base_inheritance: ClassVar[str] = "Component"
_output_logs: dict[str, Log] = {}
@ -41,7 +41,7 @@ class Component(CustomComponent):
config[key] = value
else:
inputs[key] = value
self._inputs: dict[str, InputTypes] = {}
self._inputs: dict[str, "InputTypes"] = {}
self._outputs: dict[str, Output] = {}
self._results: dict[str, Any] = {}
self._attributes: dict[str, Any] = {}
@ -64,6 +64,20 @@ class Component(CustomComponent):
self.map_outputs(self.outputs)
# Set output types
self._set_output_types()
self.set_class_code()
def set_class_code(self):
# Get the source code of the calling class
if self._code:
return
try:
module = inspect.getmodule(self.__class__)
if module is None:
raise ValueError("Could not find module for class")
class_code = inspect.getsource(module)
self._code = class_code
except OSError:
raise ValueError(f"Could not find source code for {self.__class__.__name__}")
def set(self, **kwargs):
"""
@ -174,7 +188,7 @@ class Component(CustomComponent):
raise ValueError("Output name cannot be None.")
self._outputs[output.name] = output
def map_inputs(self, inputs: List[InputTypes]):
def map_inputs(self, inputs: List["InputTypes"]):
"""
Maps the given inputs to the component.
@ -272,7 +286,7 @@ class Component(CustomComponent):
"target": self._id,
"data": {
"sourceHandle": {
"dataType": self.name,
"dataType": component.name or component.__class__.__name__,
"id": component._id,
"name": output.name,
"output_types": output.types,
@ -366,6 +380,21 @@ class Component(CustomComponent):
self._map_parameters_on_template(frontend_node_dict["template"])
frontend_node = ComponentFrontendNode.from_dict(frontend_node_dict)
if not self._code:
self.set_class_code()
code_field = Input(
dynamic=True,
required=True,
placeholder="",
multiline=True,
value=self._code,
password=False,
name="code",
advanced=True,
field_type="code",
is_list=False,
)
frontend_node.template.add_field(code_field)
for output in frontend_node.outputs:
if output.types:
@ -379,7 +408,7 @@ class Component(CustomComponent):
data = {
"data": {
"node": frontend_node.to_dict(keep_name=False),
"type": self.__class__.__name__,
"type": self.name or self.__class__.__name__,
}
}
return data

View file

@ -283,7 +283,7 @@ def get_component_instance(custom_component: CustomComponent, user_id: Optional[
) from exc
try:
custom_instance = custom_class(_user_id=user_id)
custom_instance = custom_class(_user_id=user_id, _code=custom_component._code)
return custom_instance
except Exception as exc:
logger.error(f"Error while instantiating custom component: {str(exc)}")
@ -339,7 +339,7 @@ def run_build_config(
raise exc
def add_code_field(frontend_node: CustomComponentFrontendNode, raw_code, field_config):
def add_code_field(frontend_node: CustomComponentFrontendNode, raw_code):
code_field = Input(
dynamic=True,
required=True,
@ -364,7 +364,7 @@ def build_custom_component_template_from_inputs(
cc_instance = get_component_instance(custom_component, user_id=user_id)
field_config = cc_instance.get_template_config(cc_instance)
frontend_node = ComponentFrontendNode.from_inputs(**field_config)
frontend_node = add_code_field(frontend_node, custom_component._code, field_config.get("code", {}))
frontend_node = add_code_field(frontend_node, custom_component._code)
# But we now need to calculate the return_type of the methods in the outputs
for output in frontend_node.outputs:
if output.types:
@ -408,7 +408,7 @@ def build_custom_component_template(
add_extra_fields(frontend_node, field_config, entrypoint_args)
frontend_node = add_code_field(frontend_node, custom_component._code, field_config.get("code", {}))
frontend_node = add_code_field(frontend_node, custom_component._code)
add_base_classes(frontend_node, custom_component.get_function_entrypoint_return_type)
add_output_types(frontend_node, custom_component.get_function_entrypoint_return_type)

View file

@ -182,12 +182,17 @@ class Graph:
if not isinstance(target_vertex, ComponentVertex):
raise ValueError(f"Target vertex {target_id} is not a component vertex.")
output_name, input_name = output_input_tuple
if source_vertex._custom_component is None:
raise ValueError(f"Source vertex {source_id} does not have a custom component.")
if target_vertex._custom_component is None:
raise ValueError(f"Target vertex {target_id} does not have a custom component.")
edge_data: EdgeData = {
"source": source_id,
"target": target_id,
"data": {
"sourceHandle": {
"dataType": source_vertex.base_name,
"dataType": source_vertex._custom_component.name
or source_vertex._custom_component.__class__.__name__,
"id": source_vertex.id,
"name": output_name,
"output_types": source_vertex.get_output(output_name).types,
@ -676,7 +681,7 @@ class Graph:
"flow_name": self.flow_name,
}
def build_graph_maps(self, edges: Optional[List[ContractEdge]] = None, vertices: Optional[List[Vertex]] = None):
def build_graph_maps(self, edges: Optional[List[ContractEdge]] = None, vertices: Optional[List["Vertex"]] = None):
"""
Builds the adjacency maps for the graph.
"""
@ -738,7 +743,7 @@ class Graph:
return edge
return None
def build_parent_child_map(self, vertices: List[Vertex]):
def build_parent_child_map(self, vertices: List["Vertex"]):
parent_child_map = defaultdict(list)
for vertex in vertices:
parent_child_map[vertex.id] = [child.id for child in self.get_successors(vertex)]
@ -834,7 +839,7 @@ class Graph:
# both graphs have the same vertices and edges
# but the data of the vertices might be different
def update_edges_from_vertex(self, vertex: Vertex, other_vertex: Vertex) -> None:
def update_edges_from_vertex(self, vertex: "Vertex", other_vertex: "Vertex") -> None:
"""Updates the edges of a vertex in the Graph."""
new_edges = []
for edge in self.edges:
@ -844,13 +849,13 @@ class Graph:
new_edges += other_vertex.edges
self.edges = new_edges
def vertex_data_is_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool:
def vertex_data_is_identical(self, vertex: "Vertex", other_vertex: "Vertex") -> bool:
data_is_equivalent = vertex == other_vertex
if not data_is_equivalent:
return False
return self.vertex_edges_are_identical(vertex, other_vertex)
def vertex_edges_are_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool:
def vertex_edges_are_identical(self, vertex: "Vertex", other_vertex: "Vertex") -> bool:
same_length = len(vertex.edges) == len(other_vertex.edges)
if not same_length:
return False
@ -909,7 +914,7 @@ class Graph:
self.increment_update_count()
return self
def update_vertex_from_another(self, vertex: Vertex, other_vertex: Vertex) -> None:
def update_vertex_from_another(self, vertex: "Vertex", other_vertex: "Vertex") -> None:
"""
Updates a vertex from another vertex.
@ -947,12 +952,12 @@ class Graph:
self.vertices.append(vertex)
self.vertex_map[vertex.id] = vertex
def add_vertex(self, vertex: Vertex) -> None:
def add_vertex(self, vertex: "Vertex") -> None:
"""Adds a new vertex to the graph."""
self._add_vertex(vertex)
self._update_edges(vertex)
def _update_edges(self, vertex: Vertex) -> None:
def _update_edges(self, vertex: "Vertex") -> None:
"""Updates the edges of a vertex."""
# Vertex has edges, so we need to update the edges
for edge in vertex.edges:
@ -987,19 +992,19 @@ class Graph:
for vertex in self.vertices:
vertex._build_params()
def _validate_vertex(self, vertex: Vertex) -> bool:
def _validate_vertex(self, vertex: "Vertex") -> bool:
"""Validates a vertex."""
# All vertices that do not have edges are invalid
return len(self.get_vertex_edges(vertex.id)) > 0
def get_vertex(self, vertex_id: str, silent: bool = False) -> Vertex:
def get_vertex(self, vertex_id: str, silent: bool = False) -> "Vertex":
"""Returns a vertex by id."""
try:
return self.vertex_map[vertex_id]
except KeyError:
raise ValueError(f"Vertex {vertex_id} not found")
def get_root_of_group_node(self, vertex_id: str) -> Vertex:
def get_root_of_group_node(self, vertex_id: str) -> "Vertex":
"""Returns the root of a group node."""
if vertex_id in self.top_level_vertices:
# Get all vertices with vertex_id as .parent_node_id
@ -1157,9 +1162,9 @@ class Graph:
or (edge.target_id == vertex_id and is_target is not False)
]
def get_vertices_with_target(self, vertex_id: str) -> List[Vertex]:
def get_vertices_with_target(self, vertex_id: str) -> List["Vertex"]:
"""Returns the vertices connected to a vertex."""
vertices: List[Vertex] = []
vertices: List["Vertex"] = []
for edge in self.edges:
if edge.target_id == vertex_id:
vertex = self.get_vertex(edge.source_id)
@ -1246,7 +1251,7 @@ class Graph:
"""Executes tasks in parallel, handling exceptions for each task."""
results = []
completed_tasks = await asyncio.gather(*tasks, return_exceptions=True)
vertices: List[Vertex] = []
vertices: List["Vertex"] = []
for i, result in enumerate(completed_tasks):
task_name = tasks[i].get_name()
@ -1273,7 +1278,7 @@ class Graph:
no_duplicate_results = list(set(results))
return no_duplicate_results
def topological_sort(self) -> List[Vertex]:
def topological_sort(self) -> List["Vertex"]:
"""
Performs a topological sort of the vertices in the graph.
@ -1306,7 +1311,7 @@ class Graph:
return list(reversed(sorted_vertices))
def generator_build(self) -> Generator[Vertex, None, None]:
def generator_build(self) -> Generator["Vertex", None, None]:
"""Builds each vertex in the graph and yields it."""
sorted_vertices = self.topological_sort()
logger.debug("There are %s vertices in the graph", len(sorted_vertices))
@ -1316,7 +1321,7 @@ class Graph:
"""Returns the predecessors of a vertex."""
return [self.get_vertex(source_id) for source_id in self.predecessor_map.get(vertex.id, [])]
def get_all_successors(self, vertex: Vertex, recursive=True, flat=True):
def get_all_successors(self, vertex: "Vertex", recursive=True, flat=True):
# Recursively get the successors of the current vertex
# successors = vertex.successors
# if not successors:
@ -1353,13 +1358,13 @@ class Graph:
successors_result.append([successor])
return successors_result
def get_successors(self, vertex: Vertex) -> List[Vertex]:
def get_successors(self, vertex: "Vertex") -> List["Vertex"]:
"""Returns the successors of a vertex."""
return [self.get_vertex(target_id) for target_id in self.successor_map.get(vertex.id, [])]
def get_vertex_neighbors(self, vertex: Vertex) -> Dict[Vertex, int]:
def get_vertex_neighbors(self, vertex: "Vertex") -> Dict["Vertex", int]:
"""Returns the neighbors of a vertex."""
neighbors: Dict[Vertex, int] = {}
neighbors: Dict["Vertex", int] = {}
for edge in self.edges:
if edge.source_id == vertex.id:
neighbor = self.get_vertex(edge.target_id)
@ -1387,7 +1392,8 @@ class Graph:
for edge in self._edges:
new_edge = self.build_edge(edge)
edges.add(new_edge)
if self.vertices and not edges:
raise ValueError("Graph has vertices but no edges")
return list(edges)
def build_edge(self, edge: EdgeData) -> ContractEdge:
@ -1401,7 +1407,7 @@ class Graph:
new_edge = ContractEdge(source, target, edge)
return new_edge
def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) -> Type[Vertex]:
def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) -> Type["Vertex"]:
"""Returns the node class based on the node type."""
# First we check for the node_base_type
node_name = node_id.split("-")[0]
@ -1416,15 +1422,11 @@ class Graph:
if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type]
return (
lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_base_type]
if node_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP
else Vertex
)
return Vertex
def _build_vertices(self) -> List[Vertex]:
def _build_vertices(self) -> List["Vertex"]:
"""Builds the vertices of the graph."""
vertices: List[Vertex] = []
vertices: List["Vertex"] = []
for frontend_data in self._vertices:
try:
vertex_instance = self.get_vertex(frontend_data["id"])
@ -1452,6 +1454,7 @@ class Graph:
raise ValueError("You can only provide one of stop_component_id or start_component_id")
self.validate_stream()
self.edges = self._build_edges()
if stop_component_id or start_component_id:
try:
first_layer = self.sort_vertices(stop_component_id, start_component_id)
@ -1485,7 +1488,7 @@ class Graph:
def layered_topological_sort(
self,
vertices: List[Vertex],
vertices: List["Vertex"],
filter_graphs: bool = False,
) -> List[List[str]]:
"""Performs a layered topological sort of the vertices in the graph."""

View file

@ -19,7 +19,7 @@ def _import_vertex_types():
class VertexTypesDict(LazyLoadDictBase):
def __init__(self):
self._all_types_dict = None
self._types = _import_vertex_types()
self._types = _import_vertex_types
@property
def VERTEX_TYPE_MAP(self):
@ -33,14 +33,15 @@ class VertexTypesDict(LazyLoadDictBase):
}
def get_type_dict(self):
types = self._types()
return {
**{t: self._types.CustomComponentVertex for t in ["CustomComponent"]},
**{t: self._types.ComponentVertex for t in ["Component"]},
**{t: self._types.InterfaceVertex for t in CHAT_COMPONENTS},
**{t: types.CustomComponentVertex for t in ["CustomComponent"]},
**{t: types.ComponentVertex for t in ["Component"]},
**{t: types.InterfaceVertex for t in CHAT_COMPONENTS},
}
def get_custom_component_vertex_type(self):
return self._types.CustomComponentVertex
return self._types().CustomComponentVertex
lazy_load_vertex_dict = VertexTypesDict()

View file

@ -14,7 +14,7 @@ from langflow.exceptions.component import ComponentBuildException
from langflow.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData
from langflow.graph.utils import UnbuiltObject, UnbuiltResult, log_transaction
from langflow.graph.vertex.schema import NodeData
from langflow.interface.initialize import loading
from langflow.interface import initialize
from langflow.interface.listing import lazy_load_dict
from langflow.schema.artifact import ArtifactType
from langflow.schema.data import Data
@ -453,10 +453,10 @@ class Vertex:
raise ValueError(f"Base type for vertex {self.display_name} not found")
if not self._custom_component:
custom_component, custom_params = await loading.instantiate_class(user_id=user_id, vertex=self)
custom_component, custom_params = await initialize.loading.instantiate_class(user_id=user_id, vertex=self)
else:
custom_component = self._custom_component
custom_params = loading.get_params(self.params)
custom_params = initialize.loading.get_params(self.params)
await self._build_results(custom_component, custom_params, fallback_to_env_vars)
@ -671,7 +671,7 @@ class Vertex:
async def _build_results(self, custom_component, custom_params, fallback_to_env_vars=False):
try:
result = await loading.get_instance_results(
result = await initialize.loading.get_instance_results(
custom_component=custom_component,
custom_params=custom_params,
vertex=self,

View file

@ -7,7 +7,7 @@ from langchain_core.messages import AIMessage, AIMessageChunk
from loguru import logger
from langflow.graph.schema import CHAT_COMPONENTS, RECORDS_COMPONENTS, InterfaceComponentTypes, ResultData
from langflow.graph.utils import UnbuiltObject, log_transaction, log_vertex_build, serialize_field
from langflow.graph.utils import UnbuiltObject, log_transaction, serialize_field
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.schema import NodeData
from langflow.inputs.inputs import InputTypes
@ -15,6 +15,7 @@ from langflow.schema import Data
from langflow.schema.artifact import ArtifactType
from langflow.schema.message import Message
from langflow.schema.schema import INPUT_FIELD_NAME
from langflow.graph.utils import log_vertex_build
from langflow.template.field.base import UNDEFINED, Output
from langflow.utils.schemas import ChatOutputResponse, DataOutputResponse
from langflow.utils.util import unescape_string

View file

@ -1,23 +1,24 @@
from .inputs import (
BoolInput,
DataInput,
DefaultPromptField,
DictInput,
DropdownInput,
MultiselectInput,
FileInput,
FloatInput,
HandleInput,
Input,
IntInput,
MessageInput,
MessageTextInput,
MultilineInput,
MultilineSecretInput,
MultiselectInput,
NestedDictInput,
PromptInput,
SecretStrInput,
StrInput,
TableInput,
DefaultPromptField,
)
__all__ = [
@ -39,5 +40,6 @@ __all__ = [
"StrInput",
"MessageTextInput",
"TableInput",
"Input",
"DefaultPromptField",
]

View file

@ -0,0 +1,3 @@
from . import loading
__all__ = ["loading"]

View file

@ -9,7 +9,7 @@ from langflow.utils.constants import DIRECT_TYPES
class Template(BaseModel):
type_name: str = Field(serialization_alias="_type")
fields: list[Union[Input, InputTypes]]
fields: list[InputTypes]
def process_fields(
self,

View file

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]]
name = "aiofiles"
@ -860,6 +860,7 @@ wrapt = ">=1.10,<2"
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
[[package]]
<<<<<<< Updated upstream
name = "devtools"
version = "0.12.2"
description = "Python's missing debug print command, and more."
@ -876,6 +877,8 @@ executing = ">=1.1.1"
pygments = ">=2.15.0"
[[package]]
=======
>>>>>>> Stashed changes
name = "dictdiffer"
version = "0.9.0"
description = "Dictdiffer is a library that helps you to diff and patch dictionaries."
@ -5256,4 +5259,8 @@ local = []
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
<<<<<<< Updated upstream
content-hash = "877648288f4f9d5d3304c36853d1b2c5f833d673665bf9b1c707c37bbf02e6ad"
=======
content-hash = "bbbc234350488c8293c4b74695db9757be15cabbf6df99ce24d569aecdcd503b"
>>>>>>> Stashed changes

View file

@ -21,13 +21,23 @@ async def test_graph_not_prepared():
graph = Graph()
graph.add_component("chat_input", chat_input)
graph.add_component("chat_output", chat_output)
graph.add_component_edge("chat_input", (chat_input.outputs[0].name, chat_input.inputs[0].name), "chat_output")
with pytest.raises(ValueError):
await graph.astep()
@pytest.mark.asyncio
async def test_graph():
chat_input = ChatInput()
chat_output = ChatOutput()
graph = Graph()
graph.add_component("chat_input", chat_input)
graph.add_component("chat_output", chat_output)
with pytest.raises(ValueError, match="Graph has vertices but no edges"):
graph.prepare()
@pytest.mark.asyncio
async def test_graph_with_edge():
chat_input = ChatInput()
chat_output = ChatOutput()
graph = Graph()

View file

@ -94,16 +94,16 @@ def test_memory_chatbot_dump_components_and_edges(memory_chatbot_graph: Graph):
assert nodes[0]["data"]["type"] == "ChatInput"
assert nodes[0]["id"] == "chat_input"
assert nodes[1]["data"]["type"] == "MemoryComponent"
assert nodes[1]["data"]["type"] == "Memory"
assert nodes[1]["id"] == "chat_memory"
assert nodes[2]["data"]["type"] == "ChatOutput"
assert nodes[2]["id"] == "chat_output"
assert nodes[3]["data"]["type"] == "OpenAIModelComponent"
assert nodes[3]["data"]["type"] == "OpenAIModel"
assert nodes[3]["id"] == "openai"
assert nodes[4]["data"]["type"] == "PromptComponent"
assert nodes[4]["data"]["type"] == "Prompt"
assert nodes[4]["id"] == "prompt"
# Check edges

View file

@ -134,16 +134,16 @@ def test_vector_store_rag_dump_components_and_edges(ingestion_graph, rag_graph):
ingestion_nodes = sorted(ingestion_nodes, key=lambda x: x["id"])
# Check components in the ingestion graph
assert ingestion_nodes[0]["data"]["type"] == "FileComponent"
assert ingestion_nodes[0]["data"]["type"] == "File"
assert ingestion_nodes[0]["id"] == "file-123"
assert ingestion_nodes[1]["data"]["type"] == "OpenAIEmbeddingsComponent"
assert ingestion_nodes[1]["data"]["type"] == "OpenAIEmbeddings"
assert ingestion_nodes[1]["id"] == "openai-embeddings-123"
assert ingestion_nodes[2]["data"]["type"] == "SplitTextComponent"
assert ingestion_nodes[2]["data"]["type"] == "SplitText"
assert ingestion_nodes[2]["id"] == "text-splitter-123"
assert ingestion_nodes[3]["data"]["type"] == "AstraVectorStoreComponent"
assert ingestion_nodes[3]["data"]["type"] == "AstraDB"
assert ingestion_nodes[3]["id"] == "vector-store-123"
# Check edges in the ingestion graph
@ -178,19 +178,19 @@ def test_vector_store_rag_dump_components_and_edges(ingestion_graph, rag_graph):
assert rag_nodes[1]["data"]["type"] == "ChatOutput"
assert rag_nodes[1]["id"] == "chatoutput-123"
assert rag_nodes[2]["data"]["type"] == "OpenAIModelComponent"
assert rag_nodes[2]["data"]["type"] == "OpenAIModel"
assert rag_nodes[2]["id"] == "openai-123"
assert rag_nodes[3]["data"]["type"] == "OpenAIEmbeddingsComponent"
assert rag_nodes[3]["data"]["type"] == "OpenAIEmbeddings"
assert rag_nodes[3]["id"] == "openai-embeddings-124"
assert rag_nodes[4]["data"]["type"] == "ParseDataComponent"
assert rag_nodes[4]["data"]["type"] == "ParseData"
assert rag_nodes[4]["id"] == "parse-data-123"
assert rag_nodes[5]["data"]["type"] == "PromptComponent"
assert rag_nodes[5]["data"]["type"] == "Prompt"
assert rag_nodes[5]["id"] == "prompt-123"
assert rag_nodes[6]["data"]["type"] == "AstraVectorStoreComponent"
assert rag_nodes[6]["data"]["type"] == "AstraDB"
assert rag_nodes[6]["id"] == "rag-vector-store-123"
# Check edges in the RAG graph

View file

@ -9,7 +9,12 @@ from langflow.custom import Component, CustomComponent
from langflow.custom.code_parser.code_parser import CodeParser, CodeSyntaxError
from langflow.custom.custom_component.base_component import BaseComponent, ComponentCodeNullError
from langflow.custom.utils import build_custom_component_template
from langflow.services.database.models.flow import Flow, FlowCreate
from langflow.services.database.models.flow import FlowCreate
@pytest.fixture
def client():
pass
@pytest.fixture
@ -168,7 +173,7 @@ def test_code_parser_parse_classes():
"""
Test the parse_classes method of the CodeParser class.
"""
parser = CodeParser("class Test: pass")
parser = CodeParser("from langflow.custom import Component\n\nclass Test(Component): pass")
tree = parser.get_tree()
for node in ast.walk(tree):
if isinstance(node, ast.ClassDef):
@ -177,6 +182,18 @@ def test_code_parser_parse_classes():
assert parser.data["classes"][0]["name"] == "Test"
def test_code_parser_parse_classes_raises():
    """parse_classes must reject a class that does not subclass a langflow
    Component, raising TypeError."""
    parser = CodeParser("class Test: pass")
    class_defs = [
        node
        for node in ast.walk(parser.get_tree())
        if isinstance(node, ast.ClassDef)
    ]
    with pytest.raises(TypeError):
        for class_def in class_defs:
            parser.parse_classes(class_def)
def test_code_parser_parse_global_vars():
"""
Test the parse_global_vars method of the CodeParser class.
@ -487,11 +504,6 @@ def test_list_flows_return_type(component):
assert isinstance(flows, list)
def test_list_flows_flow_objects(component):
flows = component.list_flows()
assert all(isinstance(flow, Flow) for flow in flows)
def test_build_config_return_type(component):
config = component.build_config()
assert isinstance(config, dict)

View file

@ -0,0 +1,23 @@
import pytest
from langflow.custom.custom_component.custom_component import CustomComponent
from langflow.services.database.models.flow import Flow
@pytest.fixture
def component(client, active_user):
    """A CustomComponent owned by the active user, configured with a
    minimal llm/url/year field schema (depends on the `client` fixture
    for app/database setup)."""
    field_config = {
        "fields": {
            "llm": {"type": "str"},
            "url": {"type": "str"},
            "year": {"type": "int"},
        }
    }
    return CustomComponent(user_id=active_user.id, field_config=field_config)
def test_list_flows_flow_objects(component):
    """Every item returned by list_flows should be a Flow model instance."""
    listed = component.list_flows()
    assert all(isinstance(item, Flow) for item in listed)

View file

@ -1,26 +1,26 @@
import json
from collections import namedtuple
from uuid import UUID, uuid4
from langflow.graph.utils import log_transaction, log_vertex_build
import orjson
import pytest
from fastapi.testclient import TestClient
from sqlmodel import Session
from langflow.api.v1.schemas import FlowListCreate, ResultDataResponse
from langflow.initial_setup.setup import load_starter_projects, load_flows_from_directory
from langflow.graph.utils import log_transaction, log_vertex_build
from langflow.initial_setup.setup import load_flows_from_directory, load_starter_projects
from langflow.services.database.models.base import orjson_dumps
from langflow.services.database.models.flow import Flow, FlowCreate, FlowUpdate
from langflow.services.database.models.transactions.crud import get_transactions_by_flow_id
from langflow.services.database.utils import session_getter, migrate_transactions_from_monitor_service_to_database
from langflow.services.database.utils import migrate_transactions_from_monitor_service_to_database, session_getter
from langflow.services.deps import get_db_service, get_monitor_service, session_scope
from langflow.services.monitor.schema import TransactionModel
from langflow.services.monitor.utils import (
add_row_to_table,
drop_and_create_table_if_schema_mismatch,
new_duckdb_locked_connection,
add_row_to_table,
)
from collections import namedtuple
@pytest.fixture(scope="module")
@ -46,7 +46,7 @@ def test_create_flow(client: TestClient, json_flow: str, active_user, logged_in_
assert response.json()["name"] == flow.name
assert response.json()["data"] == flow.data
# flow is optional so we can create a flow without a flow
flow = FlowCreate(name="Test Flow")
flow = FlowCreate(name=str(uuid4()))
response = client.post("api/v1/flows/", json=flow.model_dump(exclude_unset=True), headers=logged_in_headers)
assert response.status_code == 201
assert response.json()["name"] == flow.name

View file

@ -45,6 +45,7 @@
"cmdk": "^1.0.0",
"dompurify": "^3.1.5",
"dotenv": "^16.4.5",
"elkjs": "^0.9.3",
"emoji-regex": "^10.3.0",
"esbuild": "^0.21.5",
"file-saver": "^2.0.5",
@ -7424,6 +7425,11 @@
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.806.tgz",
"integrity": "sha512-nkoEX2QIB8kwCOtvtgwhXWy2IHVcOLQZu9Qo36uaGB835mdX/h8uLRlosL6QIhLVUnAiicXRW00PwaPZC74Nrg=="
},
"node_modules/elkjs": {
"version": "0.9.3",
"resolved": "https://registry.npmjs.org/elkjs/-/elkjs-0.9.3.tgz",
"integrity": "sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ=="
},
"node_modules/emoji-regex": {
"version": "10.3.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.3.0.tgz",

View file

@ -40,6 +40,7 @@
"cmdk": "^1.0.0",
"dompurify": "^3.1.5",
"dotenv": "^16.4.5",
"elkjs": "^0.9.3",
"emoji-regex": "^10.3.0",
"esbuild": "^0.21.5",
"file-saver": "^2.0.5",

View file

@ -0,0 +1,30 @@
import { sourceHandleType, targetHandleType } from "@/types/flow";
import { scapedJSONStringfy } from "@/utils/reactflowUtils";
/**
 * Builds the escaped handle id string for a node's right-side (source)
 * handle. NOTE: the key insertion order below is significant — it is what
 * determines the serialized id produced by scapedJSONStringfy.
 */
export function getRightHandleId(source: sourceHandleType): string {
  return scapedJSONStringfy({
    dataType: source.dataType,
    id: source.id,
    output_types: source.output_types,
    name: source.name,
  });
}
/**
 * Builds the escaped handle id string for a node's left-side (target)
 * handle. NOTE: the key insertion order below is significant — it is what
 * determines the serialized id produced by scapedJSONStringfy.
 */
export function getLeftHandleId(target: targetHandleType): string {
  return scapedJSONStringfy({
    inputTypes: target.inputTypes,
    id: target.id,
    type: target.type,
    fieldName: target.fieldName,
  });
}

View file

@ -879,3 +879,6 @@ export const LANGFLOW_ACCESS_TOKEN_EXPIRE_SECONDS = 60 * 60 - 60 * 60 * 0.1;
export const LANGFLOW_ACCESS_TOKEN_EXPIRE_SECONDS_ENV =
Number(process.env.ACCESS_TOKEN_EXPIRE_SECONDS) -
Number(process.env.ACCESS_TOKEN_EXPIRE_SECONDS) * 0.1;
export const NODE_WIDTH = 384;
export const NODE_HEIGHT = NODE_WIDTH * 3;

View file

@ -34,7 +34,7 @@ const useAddFlow = () => {
return new Promise(async (resolve, reject) => {
const flow = cloneDeep(params?.flow) ?? undefined;
let flowData = flow
? processDataFromFlow(flow)
? await processDataFromFlow(flow)
: { nodes: [], edges: [], viewport: { zoom: 1, x: 0, y: 0 } };
flowData?.nodes.forEach((node) => {
updateGroupRecursion(

View file

@ -6,6 +6,10 @@ export type unselectAllNodesType = {
data: Node[];
};
export type addEscapedHandleIdsToEdgesType = {
edges: Edge[];
};
export type updateEdgesHandleIdsType = {
nodes: NodeType[];
edges: Edge[];

View file

@ -0,0 +1,76 @@
import { NODE_HEIGHT, NODE_WIDTH } from "@/constants/constants";
import { NodeType } from "@/types/flow";
import ELK, { ElkNode } from "elkjs/lib/elk.bundled.js";
import { cloneDeep } from "lodash";
import { Edge } from "reactflow";
const layoutOptions = {
"elk.algorithm": "layered",
"elk.direction": "RIGHT",
"elk.layered.spacing.edgeNodeBetweenLayers": "40",
"elk.spacing.nodeNode": "40",
"elk.layered.nodePlacement.strategy": "SIMPLE",
};
const elk = new ELK();
/**
 * Uses elkjs to compute a position for every node (layered, left-to-right).
 * Returns copies of `nodes` with `position` filled in; nodes are never
 * mutated in place. Fix: the original local names were inverted —
 * `targetPorts` actually held source-side handles and vice versa — renamed
 * to `outgoingPorts`/`incomingPorts` with no behavior change.
 */
export const getLayoutedNodes = async (nodes: NodeType[], edges: Edge[]) => {
  const graph = {
    id: "root",
    layoutOptions,
    children: cloneDeep(nodes).map((n) => {
      // EAST (right) side: handles where this node is the edge's source,
      // i.e. outgoing connections.
      const outgoingPorts = edges
        .filter((e) => e.source === n.id)
        .map((e) => ({
          id: e.sourceHandle,
          properties: {
            side: "EAST",
          },
        }));
      // WEST (left) side: handles where this node is the edge's target,
      // i.e. incoming connections.
      const incomingPorts = edges
        .filter((e) => e.target === n.id)
        .map((e) => ({
          id: e.targetHandle,
          properties: {
            side: "WEST",
          },
        }));
      return {
        id: n.id,
        width: NODE_WIDTH,
        height: NODE_HEIGHT,
        // ⚠️ we need to tell elk that the ports are fixed, in order to reduce edge crossings
        properties: {
          "org.eclipse.elk.portConstraints": "FIXED_ORDER",
        },
        // we are also passing the id, so we can also handle edges without a sourceHandle or targetHandle option
        ports: [{ id: n.id }, ...outgoingPorts, ...incomingPorts],
      };
    }) as ElkNode[],
    edges: edges.map((e) => ({
      id: e.id,
      // fall back to the node id when the edge has no handle id
      sources: [e.sourceHandle || e.source],
      targets: [e.targetHandle || e.target],
    })),
  };
  const layoutedGraph = await elk.layout(graph);
  const layoutedNodes = nodes.map((node) => {
    const layoutedNode = layoutedGraph.children?.find(
      (lgNode) => lgNode.id === node.id,
    );
    return {
      ...node,
      // default to the origin if elk did not return a position for the node
      position: {
        x: layoutedNode?.x ?? 0,
        y: layoutedNode?.y ?? 0,
      },
      type: "genericNode",
    };
  });
  return layoutedNodes;
};

View file

@ -1,3 +1,7 @@
import {
getLeftHandleId,
getRightHandleId,
} from "@/CustomNodes/utils/get-handle-id";
import { cloneDeep } from "lodash";
import {
Connection,
@ -34,11 +38,13 @@ import {
targetHandleType,
} from "../types/flow";
import {
addEscapedHandleIdsToEdgesType,
findLastNodeType,
generateFlowType,
unselectAllNodesType,
updateEdgesHandleIdsType,
} from "../types/utils/reactflowUtils";
import { getLayoutedNodes } from "./layoutUtils";
import { createRandomKey, toTitleCase } from "./utils";
const uid = new ShortUniqueId();
@ -274,7 +280,7 @@ export function updateTemplate(
export const processFlows = (DbData: FlowType[], skipUpdate = true) => {
let savedComponents: { [key: string]: APIClassType } = {};
DbData.forEach((flow: FlowType) => {
DbData.forEach(async (flow: FlowType) => {
try {
if (!flow.data) {
return;
@ -290,15 +296,24 @@ export const processFlows = (DbData: FlowType[], skipUpdate = true) => {
] = cloneDeep((flow.data.nodes[0].data as NodeDataType).node!);
return;
}
processDataFromFlow(flow, !skipUpdate);
await processDataFromFlow(flow, !skipUpdate).catch((e) => {
console.error(e);
});
} catch (e) {
console.log(e);
console.error(e);
}
});
return { data: savedComponents, flows: DbData };
};
export const processDataFromFlow = (flow: FlowType, refreshIds = true) => {
// True when at least one node is missing its canvas position, meaning the
// flow was imported without layout info and needs an automatic layout pass.
const needsLayout = (nodes: NodeType[]): boolean =>
  !nodes.every((node) => node.position);
export async function processDataFromFlow(
flow: FlowType,
refreshIds = true,
): Promise<ReactFlowJsonObject | null> {
let data = flow?.data ? flow.data : null;
if (data) {
processFlowEdges(flow);
@ -308,9 +323,14 @@ export const processDataFromFlow = (flow: FlowType, refreshIds = true) => {
updateEdges(data.edges);
// updateNodes(data.nodes, data.edges);
if (refreshIds) updateIds(data); // Assuming updateIds is defined elsewhere
// add layout to nodes if not present
if (needsLayout(data.nodes)) {
const layoutedNodes = await getLayoutedNodes(data.nodes, data.edges);
data.nodes = layoutedNodes;
}
}
return data;
};
}
export function updateIds(
{ edges, nodes }: { edges: Edge[]; nodes: Node[] },
@ -341,6 +361,7 @@ export function updateIds(
concatedEdges.forEach((edge: Edge) => {
edge.source = idsMap[edge.source];
edge.target = idsMap[edge.target];
const sourceHandleObject: sourceHandleType = scapeJSONParse(
edge.sourceHandle!,
);
@ -475,6 +496,26 @@ export function addVersionToDuplicates(flow: FlowType, flows: FlowType[]) {
return newName;
}
/**
 * Returns a deep copy of `edges` where any edge missing its escaped
 * sourceHandle/targetHandle string id gets one derived from the raw handle
 * objects stored in `edge.data`.
 *
 * Fix: guard against `edge.data?.sourceHandle` / `edge.data?.targetHandle`
 * being undefined — getRightHandleId/getLeftHandleId destructure their
 * argument and would throw a TypeError on undefined. Edges without raw
 * handle data are now left untouched instead of crashing.
 */
export function addEscapedHandleIdsToEdges({
  edges,
}: addEscapedHandleIdsToEdgesType): Edge[] {
  const newEdges = cloneDeep(edges);
  newEdges.forEach((edge) => {
    if (!edge.sourceHandle && edge.data?.sourceHandle) {
      edge.sourceHandle = getRightHandleId(edge.data.sourceHandle);
    }
    if (!edge.targetHandle && edge.data?.targetHandle) {
      edge.targetHandle = getLeftHandleId(edge.data.targetHandle);
    }
  });
  return newEdges;
}
export function updateEdgesHandleIds({
edges,
nodes,
@ -740,6 +781,13 @@ export function checkOldEdgesHandles(edges: Edge[]): boolean {
);
}
// An edge imported without escaped ids carries the raw handle object in
// `data.sourceHandle` but has no sourceHandle/targetHandle string yet.
export function checkEdgeWithoutEscapedHandleIds(edges: Edge[]): boolean {
  for (const edge of edges) {
    const missingEscapedId = !edge.sourceHandle || !edge.targetHandle;
    if (missingEscapedId && edge.data?.sourceHandle) {
      return true;
    }
  }
  return false;
}
// Legacy flows predate the `outputs` field on the node template; detect
// any node that is still missing it.
export function checkOldNodesOutput(nodes: NodeType[]): boolean {
  for (const node of nodes) {
    if (!node.data.node?.outputs) {
      return true;
    }
  }
  return false;
}
@ -1231,7 +1279,10 @@ export function updateEdgesIds(
export function processFlowEdges(flow: FlowType) {
if (!flow.data || !flow.data.edges) return;
if (checkOldEdgesHandles(flow.data.edges)) {
if (checkEdgeWithoutEscapedHandleIds(flow.data.edges)) {
const newEdges = addEscapedHandleIdsToEdges({ edges: flow.data.edges });
flow.data.edges = newEdges;
} else if (checkOldEdgesHandles(flow.data.edges)) {
const newEdges = updateEdgesHandleIds(flow.data);
flow.data.edges = newEdges;
}