Merge remote-tracking branch 'origin/zustand/io/migration' into globalVariables

This commit is contained in:
Lucas Oliveira 2024-03-21 15:35:52 +01:00
commit 99561d0d4c
26 changed files with 1381 additions and 887 deletions

1372
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.7.0a0"
version = "1.0.0a0"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -27,7 +27,7 @@ langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
python = ">=3.9,<3.12"
python = ">=3.10,<3.12"
duckdb = "^0.9.2"
fastapi = "^0.109.0"
uvicorn = "^0.27.0"
@ -50,7 +50,6 @@ fake-useragent = "^1.4.0"
docstring-parser = "^0.15"
psycopg2-binary = "^2.9.6"
pyarrow = "^14.0.0"
tiktoken = "~0.6.0"
wikipedia = "^1.4.0"
qdrant-client = "^1.7.0"
weaviate-client = "*"
@ -60,7 +59,7 @@ cohere = "^4.47.0"
python-multipart = "^0.0.7"
sqlmodel = "^0.0.14"
faiss-cpu = "^1.7.4"
anthropic = "^0.15.0"
anthropic = "^0.21.0"
orjson = "3.9.15"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
@ -96,7 +95,7 @@ extract-msg = "^0.47.0"
jq = { version = "^1.6.0", markers = "sys_platform != 'win32'" }
boto3 = "^1.34.0"
numexpr = "^2.8.6"
qianfan = "0.3.0"
qianfan = "0.3.5"
pgvector = "^0.2.3"
pyautogen = "^0.2.0"
langchain-google-genai = "^0.0.6"
@ -104,9 +103,11 @@ elasticsearch = "^8.12.0"
pytube = "^15.0.0"
python-socketio = "^5.11.0"
llama-index = "^0.10.13"
langchain-openai = "^0.0.6"
langchain-openai = "^0.0.5"
unstructured = { extras = ["md"], version = "^0.12.4" }
dspy-ai = "^2.4.0"
crewai = "^0.22.5"
langchain-anthropic = "^0.1.4"
[tool.poetry.group.dev.dependencies]
pytest-asyncio = "^0.23.1"

View file

@ -13,6 +13,7 @@ from langflow.services.store.schema import StoreComponentCreate
from langflow.services.store.utils import get_lf_version_from_pypi
if TYPE_CHECKING:
from langflow.graph.vertex.base import Vertex
from langflow.services.database.models.flow.model import Flow
@ -238,3 +239,62 @@ def format_exception_message(exc: Exception) -> str:
if isinstance(causing_exception, SyntaxError):
return format_syntax_error_message(causing_exception)
return str(exc)
async def get_next_runnable_vertices(
    graph: Graph,
    vertex: "Vertex",
    vertex_id: str,
    chat_service: ChatService,
    flow_id: str,
):
    """
    Retrieves the next runnable vertices in the graph for a given vertex.

    Runs entirely under the per-flow cache lock so that the predecessor
    bookkeeping and the cached graph stay consistent across concurrent
    vertex builds for the same flow.

    Args:
        graph (Graph): The graph object representing the flow.
        vertex (Vertex): The current vertex.
        vertex_id (str): The ID of the current vertex.
        chat_service (ChatService): The chat service object.
        flow_id (str): The ID of the flow.

    Returns:
        list: A list of IDs of the next runnable vertices.
    """
    # NOTE(review): if _cache_locks holds plain asyncio.Lock objects, the
    # `as lock` binding is None (asyncio.Lock.__aenter__ returns None) —
    # verify set_cache accepts that, or that a custom lock type is used.
    async with chat_service._cache_locks[flow_id] as lock:
        # This vertex has finished building: drop it from every
        # successor's pending-predecessor list.
        graph.remove_from_predecessors(vertex_id)
        direct_successors_ready = [v for v in vertex.successors_ids if graph.is_vertex_runnable(v)]
        if not direct_successors_ready:
            # No direct successors ready, look for runnable predecessors of successors
            next_runnable_vertices = graph.find_runnable_predecessors_for_successors(vertex_id)
        else:
            next_runnable_vertices = direct_successors_ready
        for v_id in set(next_runnable_vertices):  # Use set to avoid duplicates
            # Claim each vertex now so no concurrent build picks it up;
            # is_vertex_runnable guarantees membership, so remove() is safe.
            graph.vertices_to_run.remove(v_id)
            graph.remove_from_predecessors(v_id)
        # Persist the mutated graph before releasing the lock.
        await chat_service.set_cache(flow_id=flow_id, data=graph, lock=lock)
        return next_runnable_vertices
def get_top_level_vertices(graph, vertices_ids):
    """
    Retrieves the top-level vertices from the given graph based on the provided vertex IDs.

    Args:
        graph (Graph): The graph object containing the vertices.
        vertices_ids (list): A list of vertex IDs.

    Returns:
        list: A list of top-level vertex IDs.
    """

    def _top_level_id(vertex_id):
        # A vertex nested in a top-level group is represented by its parent.
        vertex = graph.get_vertex(vertex_id)
        return vertex.parent_node_id if vertex.parent_is_top_level else vertex_id

    return [_top_level_id(vertex_id) for vertex_id in vertices_ids]

View file

@ -10,6 +10,8 @@ from langflow.api.utils import (
build_and_cache_graph,
format_elapsed_time,
format_exception_message,
get_next_runnable_vertices,
get_top_level_vertices,
)
from langflow.api.v1.schemas import (
InputValueRequest,
@ -95,7 +97,8 @@ async def build_vertex(
"""Build a vertex instead of the entire graph."""
start_time = time.perf_counter()
next_vertices_ids = []
next_runnable_vertices = []
top_level_vertices = []
try:
start_time = time.perf_counter()
cache = await chat_service.get_cache(flow_id)
@ -121,12 +124,9 @@ async def build_vertex(
artifacts = vertex.artifacts
else:
raise ValueError(f"No result found for vertex {vertex_id}")
async with chat_service._cache_locks[flow_id] as lock:
graph.remove_from_predecessors(vertex_id)
next_vertices_ids = vertex.successors_ids
next_vertices_ids = [v for v in next_vertices_ids if graph.should_run_vertex(v)]
await chat_service.set_cache(flow_id=flow_id, data=graph, lock=lock)
next_runnable_vertices = await get_next_runnable_vertices(graph, vertex, vertex_id, chat_service, flow_id)
top_level_vertices = get_top_level_vertices(graph, next_runnable_vertices)
result_data_response = ResultDataResponse(**result_dict.model_dump())
except Exception as exc:
@ -166,12 +166,13 @@ async def build_vertex(
# to stop the build of the graph at a certain vertex
# if it is in next_vertices_ids, we need to remove other
# vertices from next_vertices_ids
if graph.stop_vertex and graph.stop_vertex in next_vertices_ids:
next_vertices_ids = [graph.stop_vertex]
if graph.stop_vertex and graph.stop_vertex in next_runnable_vertices:
next_runnable_vertices = [graph.stop_vertex]
build_response = VertexBuildResponse(
inactivated_vertices=inactivated_vertices,
next_vertices_ids=next_vertices_ids,
next_vertices_ids=next_runnable_vertices,
top_level_vertices=top_level_vertices,
valid=valid,
params=params,
id=vertex.id,
@ -201,7 +202,7 @@ async def build_vertex_stream(
async def stream_vertex():
try:
if not session_id:
cache = chat_service.get_cache(flow_id)
cache = await chat_service.get_cache(flow_id)
if not cache:
# If there's no cache
raise ValueError(f"No cache found for {flow_id}.")
@ -251,7 +252,7 @@ async def build_vertex_stream(
raise ValueError(f"No result found for vertex {vertex_id}")
except Exception as exc:
logger.error(f"Error building vertex: {exc}")
logger.exception(f"Error building vertex: {exc}")
yield str(StreamData(event="error", data={"error": str(exc)}))
finally:
logger.debug("Closing stream")

View file

@ -247,6 +247,7 @@ class VertexBuildResponse(BaseModel):
id: Optional[str] = None
inactivated_vertices: Optional[List[str]] = None
next_vertices_ids: Optional[List[str]] = None
top_level_vertices: Optional[List[str]] = None
valid: bool
params: Optional[Any] = Field(default_factory=dict)
"""JSON string of the params."""

View file

@ -44,13 +44,13 @@ def post_validate_prompt(prompt_request: ValidatePromptRequest):
input_variables=input_variables,
frontend_node=None,
)
if not prompt_request.custom_fields:
prompt_request.custom_fields = defaultdict(list)
old_custom_fields = get_old_custom_fields(prompt_request.custom_fields, prompt_request.name)
if not prompt_request.frontend_node.custom_fields:
prompt_request.frontend_node.custom_fields = defaultdict(list)
old_custom_fields = get_old_custom_fields(prompt_request.frontend_node.custom_fields, prompt_request.name)
add_new_variables_to_template(
input_variables,
prompt_request.custom_fields,
prompt_request.frontend_node.custom_fields,
prompt_request.frontend_node.template,
prompt_request.name,
)
@ -58,13 +58,25 @@ def post_validate_prompt(prompt_request: ValidatePromptRequest):
remove_old_variables_from_template(
old_custom_fields,
input_variables,
prompt_request.custom_fields,
prompt_request.frontend_node.custom_fields,
prompt_request.frontend_node.template,
prompt_request.name,
)
update_input_variables_field(input_variables, prompt_request.frontend_node.template)
# If frontend_node.template contains only one field that is type == 'prompt', then we can remove all fields that are not
# 'code', and not in the input_variables list.
prompt_fields = [
key
for key, field in prompt_request.frontend_node.template.items()
if isinstance(field, dict) and field["type"] == "prompt"
]
if len(prompt_fields) == 1:
for key, field in prompt_request.frontend_node.template.copy().items():
if isinstance(field, dict) and field["type"] != "code" and key not in input_variables + prompt_fields:
del prompt_request.frontend_node.template[key]
return PromptValidationResponse(
input_variables=input_variables,
frontend_node=prompt_request.frontend_node,

View file

@ -1,6 +1,6 @@
from typing import Optional
from langchain_community.chat_models.anthropic import ChatAnthropic
from langchain_anthropic.chat_models import ChatAnthropic
from pydantic.v1 import SecretStr
from langflow.components.models.base.model import LCModelComponent

View file

@ -35,6 +35,14 @@ class Graph:
edges: List[Dict[str, str]],
flow_id: Optional[str] = None,
) -> None:
"""
Initializes a new instance of the Graph class.
Args:
nodes (List[Dict]): A list of dictionaries representing the vertices of the graph.
edges (List[Dict[str, str]]): A list of dictionaries representing the edges of the graph.
flow_id (Optional[str], optional): The ID of the flow. Defaults to None.
"""
self._vertices = nodes
self._edges = edges
self.raw_graph_data = {"nodes": nodes, "edges": edges}
@ -71,11 +79,26 @@ class Graph:
self.state_manager = GraphStateManager()
def get_state(self, name: str) -> Optional[Record]:
"""Returns the state of the graph."""
"""
Returns the state of the graph with the given name.
Args:
name (str): The name of the state.
Returns:
Optional[Record]: The state record, or None if the state does not exist.
"""
return self.state_manager.get_state(name, run_id=self._run_id)
def update_state(self, name: str, record: Union[str, Record], caller: Optional[str] = None) -> None:
"""Updates the state of the graph."""
"""
Updates the state of the graph with the given name.
Args:
name (str): The name of the state.
record (Union[str, Record]): The new state record.
caller (Optional[str], optional): The ID of the vertex that is updating the state. Defaults to None.
"""
if caller:
# If there is a caller which is a vertex_id, I want to activate
# all StateVertex in self.vertices that are not the caller
@ -86,6 +109,13 @@ class Graph:
self.state_manager.update_state(name, record, run_id=self._run_id)
def activate_state_vertices(self, name: str, caller: str):
"""
Activates the state vertices in the graph with the given name and caller.
Args:
name (str): The name of the state.
caller (str): The ID of the vertex that is updating the state.
"""
vertices_ids = []
for vertex_id in self._is_state_vertices:
if vertex_id == caller:
@ -104,10 +134,20 @@ class Graph:
self.vertices_to_run.update(vertices_ids)
def reset_activated_vertices(self):
"""
Resets the activated vertices in the graph.
"""
self.activated_vertices = []
def append_state(self, name: str, record: Union[str, Record], caller: Optional[str] = None) -> None:
"""Appends the state of the graph."""
"""
Appends the state of the graph with the given name.
Args:
name (str): The name of the state.
record (Union[str, Record]): The state record to append.
caller (Optional[str], optional): The ID of the vertex that is updating the state. Defaults to None.
"""
if caller:
self.activate_state_vertices(name, caller)
@ -115,17 +155,38 @@ class Graph:
@property
def run_id(self):
"""
The ID of the current run.
Returns:
str: The run ID.
Raises:
ValueError: If the run ID is not set.
"""
if not self._run_id:
raise ValueError("Run ID not set")
return self._run_id
def set_run_id(self, run_id: str):
"""
Sets the ID of the current run.
Args:
run_id (str): The run ID.
"""
for vertex in self.vertices:
self.state_manager.subscribe(run_id, vertex.update_graph_state)
self._run_id = run_id
@property
def sorted_vertices_layers(self) -> List[List[str]]:
"""
The sorted layers of vertices in the graph.
Returns:
List[List[str]]: The sorted layers of vertices.
"""
if not self._sorted_vertices_layers:
self.sort_vertices()
return self._sorted_vertices_layers
@ -148,7 +209,19 @@ class Graph:
stream: bool,
session_id: str,
) -> List[Optional["ResultData"]]:
"""Runs the graph with the given inputs."""
"""
Runs the graph with the given inputs.
Args:
inputs (Dict[str, str]): The input values for the graph.
input_components (list[str]): The components to run for the inputs.
outputs (list[str]): The outputs to retrieve from the graph.
stream (bool): Whether to stream the results or not.
session_id (str): The session ID for the graph.
Returns:
List[Optional["ResultData"]]: The outputs of the graph.
"""
for vertex_id in self._is_input_vertices:
vertex = self.get_vertex(vertex_id)
if input_components and (vertex_id not in input_components or vertex.display_name not in input_components):
@ -190,7 +263,19 @@ class Graph:
session_id: Optional[str] = None,
stream: bool = False,
) -> List[RunOutputs]:
"""Runs the graph with the given inputs."""
"""
Runs the graph with the given inputs.
Args:
inputs (list[Dict[str, str]]): The input values for the graph.
inputs_components (Optional[list[list[str]]], optional): The components to run for the inputs. Defaults to None.
outputs (Optional[list[str]], optional): The outputs to retrieve from the graph. Defaults to None.
session_id (Optional[str], optional): The session ID for the graph. Defaults to None.
stream (bool, optional): Whether to stream the results or not. Defaults to False.
Returns:
List[RunOutputs]: The outputs of the graph.
"""
# inputs is {"message": "Hello, world!"}
# we need to go through self.inputs and update the self._raw_params
# of the vertices that are inputs
@ -218,16 +303,23 @@ class Graph:
vertex_outputs.append(run_output_object)
return vertex_outputs
# vertices_layers is a list of lists ordered by the order the vertices
# should be built.
# We need to create a new method that will take the vertices_layers
# and return the next vertex to be built.
def next_vertex_to_build(self):
"""Returns the next vertex to be built."""
"""
Returns the next vertex to be built.
Yields:
str: The ID of the next vertex to be built.
"""
yield from chain.from_iterable(self.vertices_layers)
@property
def metadata(self):
"""
The metadata of the graph.
Returns:
dict: The metadata of the graph.
"""
return {
"runs": self._runs,
"updates": self._updates,
@ -235,12 +327,19 @@ class Graph:
}
def build_graph_maps(self):
"""
Builds the adjacency maps for the graph.
"""
self.predecessor_map, self.successor_map = self.build_adjacency_maps()
self.in_degree_map = self.build_in_degree()
self.parent_child_map = self.build_parent_child_map()
def reset_inactivated_vertices(self):
"""
Resets the inactivated vertices in the graph.
"""
self.inactivated_vertices = []
self.inactivated_vertices = set()
def mark_all_vertices(self, state: str):
@ -377,7 +476,10 @@ class Graph:
# Remove vertices that are not in the other graph
for vertex_id in removed_vertex_ids:
self.remove_vertex(vertex_id)
try:
self.remove_vertex(vertex_id)
except ValueError:
pass
# The order here matters because adding the vertex is required
# if any of them have edges that point to any of the new vertices
@ -741,8 +843,11 @@ class Graph:
vertex_data = vertex["data"]
vertex_type: str = vertex_data["type"] # type: ignore
vertex_base_type: str = vertex_data["node"]["template"]["_type"] # type: ignore
if "id" not in vertex_data:
raise ValueError(f"Vertex data for {vertex_data['display_name']} does not contain an id")
VertexClass = self._get_vertex_class(vertex_type, vertex_base_type, vertex_data["id"])
vertex_instance = VertexClass(vertex, graph=self)
vertex_instance.set_top_level(self.top_level_vertices)
vertices.append(vertex_instance)
@ -953,22 +1058,26 @@ class Graph:
# Return just the first layer
return first_layer
def vertex_has_no_more_predecessors(self, vertex_id: str) -> bool:
"""Returns whether a vertex has no more predecessors."""
return not self.run_predecessors.get(vertex_id)
def is_vertex_runnable(self, vertex_id: str) -> bool:
"""Returns whether a vertex is runnable."""
return vertex_id in self.vertices_to_run and not self.run_predecessors.get(vertex_id)
def should_run_vertex(self, vertex_id: str) -> bool:
"""Returns whether a component should be run."""
# the self.run_map is a map of vertex_id to a list of predecessors
# each time a vertex is run, we remove it from the list of predecessors
# if a vertex has no more predecessors, it should be run
should_run = vertex_id in self.vertices_to_run and self.vertex_has_no_more_predecessors(vertex_id)
def find_runnable_predecessors_for_successors(self, vertex_id: str) -> List[str]:
"""
For each successor of the current vertex, find runnable predecessors if any.
This checks the direct predecessors of each successor to identify any that are
immediately runnable, expanding the search to ensure progress can be made.
"""
runnable_vertices = []
visited = set()
if should_run:
self.vertices_to_run.remove(vertex_id)
# remove the vertex from the run_map
self.remove_from_predecessors(vertex_id)
return should_run
for successor_id in self.run_map.get(vertex_id, []):
for predecessor_id in self.run_predecessors.get(successor_id, []):
if predecessor_id not in visited and self.is_vertex_runnable(predecessor_id):
runnable_vertices.append(predecessor_id)
visited.add(predecessor_id)
return runnable_vertices
def remove_from_predecessors(self, vertex_id: str):
predecessors = self.run_map.get(vertex_id, [])

View file

@ -1,4 +1,3 @@
import { cloneDeep } from "lodash";
import useFlowStore from "../../stores/flowStore";
import { IOOutputProps } from "../../types/components";
import { Textarea } from "../ui/textarea";

View file

@ -105,7 +105,13 @@ export default function IOView({
return (
<BaseModal
size={haveChat ? (selectedTab === 0 ? "large-thin" : "large") : "small"}
size={
haveChat || selectedViewField
? selectedTab === 0
? "large-thin"
: "large"
: "small"
}
open={open}
setOpen={setOpen}
disable={disable}
@ -123,13 +129,13 @@ export default function IOView({
</div>
</BaseModal.Header>
<BaseModal.Content>
<div className="flex h-full flex-col">
<div className="flex h-full flex-col overflow-hidden">
<div className="flex-max-width mt-2 h-full">
{selectedTab !== 0 && (
<div
className={cn(
"mr-6 flex h-full w-2/6 flex-shrink-0 flex-col justify-start",
haveChat ? "w-2/6" : "w-full"
"mr-6 flex h-full w-2/6 flex-shrink-0 flex-col justify-start transition-all duration-300",
haveChat || selectedViewField ? "w-2/6" : "w-full"
)}
>
<Tabs
@ -179,20 +185,18 @@ export default function IOView({
<Badge variant="gray" size="md">
{node.data.node.display_name}
</Badge>
{haveChat && (
<div
className="-mb-1 pr-4"
onClick={(event) => {
event.stopPropagation();
setSelectedViewField(input);
}}
>
<IconComponent
className="h-4 w-4"
name="ExternalLink"
></IconComponent>
</div>
)}
<div
className="-mb-1 pr-4"
onClick={(event) => {
event.stopPropagation();
setSelectedViewField(input);
}}
>
<IconComponent
className="h-4 w-4"
name="ExternalLink"
></IconComponent>
</div>
</div>
}
key={index}
@ -248,20 +252,18 @@ export default function IOView({
</Badge>
</div>
</ShadTooltip>
{haveChat && (
<div
className="-mb-1 pr-4"
onClick={(event) => {
event.stopPropagation();
setSelectedViewField(output);
}}
>
<IconComponent
className="h-4 w-4"
name="ExternalLink"
></IconComponent>
</div>
)}
<div
className="-mb-1 pr-4"
onClick={(event) => {
event.stopPropagation();
setSelectedViewField(output);
}}
>
<IconComponent
className="h-4 w-4"
name="ExternalLink"
></IconComponent>
</div>
</div>
}
key={index}
@ -287,12 +289,12 @@ export default function IOView({
</div>
)}
{haveChat ? (
{haveChat || selectedViewField ? (
<div className="flex h-full min-w-96 flex-grow">
{selectedViewField && (
<div
className={cn(
"flex h-full w-full flex-col items-start gap-4 p-4",
"flex h-full w-full flex-col items-start gap-4 pt-4",
!selectedViewField ? "hidden" : ""
)}
>
@ -347,7 +349,7 @@ export default function IOView({
</BaseModal.Content>
<BaseModal.Footer>
{!haveChat && (
<div className="flex w-full justify-end pt-6">
<div className="flex w-full justify-end pt-2">
<Button
variant={"outline"}
className="flex gap-2 px-3"

View file

@ -29,6 +29,7 @@ export const MenuBar = ({
const addFlow = useFlowsManagerStore((state) => state.addFlow);
const currentFlow = useFlowsManagerStore((state) => state.currentFlow);
const setErrorData = useAlertStore((state) => state.setErrorData);
const setSuccessData = useAlertStore((state) => state.setSuccessData);
const undo = useFlowsManagerStore((state) => state.undo);
const redo = useFlowsManagerStore((state) => state.redo);
const saveLoading = useFlowsManagerStore((state) => state.saveLoading);
@ -38,11 +39,21 @@ export const MenuBar = ({
const navigate = useNavigate();
const isBuilding = useFlowStore((state) => state.isBuilding);
function handleAddFlow() {
function handleAddFlow(duplicate?: boolean) {
try {
addFlow(true).then((id) => {
navigate("/flow/" + id);
});
if (duplicate) {
if (!currentFlow) {
throw new Error("No flow to duplicate");
}
addFlow(true, currentFlow).then((id) => {
setSuccessData({ title: "Flow duplicated successfully" });
navigate("/flow/" + id);
});
} else {
addFlow(true).then((id) => {
navigate("/flow/" + id);
});
}
} catch (err) {
setErrorData(err as { title: string; list?: Array<string> });
}
@ -62,7 +73,7 @@ export const MenuBar = ({
<button
onClick={() => {
removeFunction(nodes);
navigate(-1);
navigate("/");
}}
>
<IconComponent name="ChevronLeft" className="w-4" />
@ -88,6 +99,15 @@ export const MenuBar = ({
<IconComponent name="Plus" className="header-menu-options" />
New
</DropdownMenuItem>
<DropdownMenuItem
onClick={() => {
handleAddFlow(true);
}}
className="cursor-pointer"
>
<IconComponent name="Copy" className="header-menu-options" />
Duplicate
</DropdownMenuItem>
<DropdownMenuItem
onClick={() => {

View file

@ -1,6 +1,7 @@
import * as Form from "@radix-ui/react-form";
import { PopoverAnchor } from "@radix-ui/react-popover";
import { useEffect, useRef, useState } from "react";
import useAlertStore from "../../stores/alertStore";
import { InputComponentType } from "../../types/components";
import { handleKeyDown } from "../../utils/reactflowUtils";
import { classNames, cn } from "../../utils/utils";
@ -37,6 +38,7 @@ export default function InputComponent({
optionsButton,
optionButton,
}: InputComponentType): JSX.Element {
const setErrorData = useAlertStore.getState().setErrorData;
const [pwdVisible, setPwdVisible] = useState(false);
const refInput = useRef<HTMLInputElement>(null);
const [showOptions, setShowOptions] = useState<boolean>(false);
@ -119,8 +121,23 @@ export default function InputComponent({
)}
placeholder={password && editNode ? "Key" : placeholder}
onChange={(e) => {
onChange && onChange(e.target.value);
}}
// if the user copies a password from another input
// it might come as ••••••••••• it causes errors
// in ascii encoding, so we need to handle it
if (password) {
// check if all chars are •
if (e.target.value.split("").every((char) => char === "•")) {
setErrorData({
title: `Invalid characters: ${e.target.value}`,
list: [
"It seems you are trying to paste a password. Make sure the value is visible before copying from another field.",
],
});
}
onChange && onChange(e.target.value);
}
}}
onKeyDown={(e) => {
handleKeyDown(e, value, "");
if (blurOnEnter && e.key === "Enter")

View file

@ -29,7 +29,7 @@ export default function RenameLabel(props) {
}
resizeInput();
return () => {
if(isRename) document.removeEventListener("keydown", () => {});
if (isRename) document.removeEventListener("keydown", () => {});
};
}, [isRename]);

View file

@ -1,7 +1,6 @@
// src/constants/constants.ts
import { languageMap } from "../types/components";
import { FlowType } from "../types/flow";
/**
* invalid characters for flow name
@ -750,75 +749,5 @@ export const NATIVE_CATEGORIES = [
"helpers",
"experimental",
];
/*
Data ingestion
Basic Prompting
Chat com memória
Working with data (file/website)
API requests
Vector Store
Assistant
*/
export const EXAMPLES_MOCK: FlowType[] = [
{
name: "Working with data",
id: "Working with data Description",
data: {
nodes: [],
edges: [],
viewport: { zoom: 1, x: 1, y: 1 },
},
description: "This flow represents the first process in our application.",
folder: STARTER_FOLDER_NAME,
user_id: undefined,
},
{
name: "Basic Prompting",
id: "Basic Prompting Description",
data: {
nodes: [],
edges: [],
viewport: { zoom: 1, x: 1, y: 1 },
},
description: "This flow represents the first process in our application.",
folder: STARTER_FOLDER_NAME,
user_id: undefined,
},
{
name: "Chat with memory",
id: "Chat with memory Description",
data: {
nodes: [],
edges: [],
viewport: { zoom: 1, x: 1, y: 1 },
},
description: "This flow represents the first process in our application.",
folder: STARTER_FOLDER_NAME,
user_id: undefined,
},
{
name: "API requests",
id: "API requests Description",
data: {
nodes: [],
edges: [],
viewport: { zoom: 1, x: 1, y: 1 },
},
description: "This flow represents the first process in our application.",
folder: STARTER_FOLDER_NAME,
user_id: undefined,
},
{
name: "Assistant",
id: "Assistant Description",
data: {
nodes: [],
edges: [],
viewport: { zoom: 1, x: 1, y: 1 },
},
description: "This flow represents the first process in our application.",
folder: STARTER_FOLDER_NAME,
user_id: undefined,
},
];
export const SAVE_DEBOUNCE_TIME = 500;

View file

@ -10,6 +10,7 @@ import {
DialogTrigger,
} from "../../components/ui/dialog";
import { modalHeaderType } from "../../types/components";
import { cn } from "../../utils/utils";
type ContentProps = { children: ReactNode };
type HeaderProps = { children: ReactNode; description: string };
@ -157,11 +158,15 @@ function BaseModal({
return (
<Dialog open={open} onOpenChange={setOpen}>
{triggerChild}
<DialogContent className={minWidth}>
<DialogContent className={cn(minWidth, "duration-300")}>
<div className="truncate-doubleline word-break-break-word">
{headerChild}
</div>
<div className={`flex flex-col ${height!} w-full `}>{ContentChild}</div>
<div
className={`flex flex-col ${height!} w-full transition-all duration-300`}
>
{ContentChild}
</div>
{ContentFooter && (
<div className="flex flex-row-reverse">{ContentFooter}</div>
)}

View file

@ -146,75 +146,83 @@ export default function CodeAreaModal({
setCode(value);
}, [value, open]);
const handlePreventEsc = (e: React.KeyboardEvent) => {
if (e.key === "Escape") {
e.preventDefault();
}
};
return (
<BaseModal open={open} setOpen={setOpen}>
<BaseModal.Trigger>{children}</BaseModal.Trigger>
<BaseModal.Header description={CODE_PROMPT_DIALOG_SUBTITLE}>
<span className="pr-2"> {EDIT_CODE_TITLE} </span>
<IconComponent
name="prompts"
className="h-6 w-6 pl-1 text-primary "
aria-hidden="true"
/>
</BaseModal.Header>
<BaseModal.Content>
<Input
value={code}
readOnly
className="absolute left-[500%] top-[500%]"
id="codeValue"
/>
<div className="flex h-full w-full flex-col transition-all">
<div className="h-full w-full">
<AceEditor
readOnly={readonly}
value={code}
mode="python"
setOptions={{ fontFamily: "monospace" }}
height={height ?? "100%"}
highlightActiveLine={true}
showPrintMargin={false}
fontSize={14}
showGutter
enableLiveAutocompletion
theme={dark ? "twilight" : "github"}
name="CodeEditor"
onChange={(value) => {
setCode(value);
}}
className="h-full w-full rounded-lg border-[1px] border-gray-300 custom-scroll dark:border-gray-600"
/>
</div>
<div
className={
"whitespace-break-spaces transition-all delay-500" +
(error?.detail?.error !== undefined ? "h-2/6" : "h-0")
}
>
<div className="mt-1 h-full max-h-[10rem] w-full overflow-y-auto overflow-x-clip text-left custom-scroll">
<h1 className="text-lg text-destructive">
{error?.detail?.error}
</h1>
<div className="ml-2 w-full text-sm text-status-red word-break-break-word">
<span className="w-full word-break-break-word">
{error?.detail?.traceback}
</span>
<div onKeyDown={(e) => handlePreventEsc(e)}>
<BaseModal open={open} setOpen={setOpen}>
<BaseModal.Trigger>{children}</BaseModal.Trigger>
<BaseModal.Header description={CODE_PROMPT_DIALOG_SUBTITLE}>
<span className="pr-2"> {EDIT_CODE_TITLE} </span>
<IconComponent
name="prompts"
className="h-6 w-6 pl-1 text-primary "
aria-hidden="true"
/>
</BaseModal.Header>
<BaseModal.Content>
<Input
value={code}
readOnly
className="absolute left-[500%] top-[500%]"
id="codeValue"
/>
<div className="flex h-full w-full flex-col transition-all">
<div className="h-full w-full">
<AceEditor
readOnly={readonly}
value={code}
mode="python"
setOptions={{ fontFamily: "monospace" }}
height={height ?? "100%"}
highlightActiveLine={true}
showPrintMargin={false}
fontSize={14}
showGutter
enableLiveAutocompletion
theme={dark ? "twilight" : "github"}
name="CodeEditor"
onChange={(value) => {
setCode(value);
}}
className="h-full w-full rounded-lg border-[1px] border-gray-300 custom-scroll dark:border-gray-600"
/>
</div>
<div
className={
"whitespace-break-spaces transition-all delay-500" +
(error?.detail?.error !== undefined ? "h-2/6" : "h-0")
}
>
<div className="mt-1 h-full max-h-[10rem] w-full overflow-y-auto overflow-x-clip text-left custom-scroll">
<h1 className="text-lg text-destructive">
{error?.detail?.error}
</h1>
<div className="ml-2 w-full text-sm text-status-red word-break-break-word">
<span className="w-full word-break-break-word">
{error?.detail?.traceback}
</span>
</div>
</div>
</div>
<div className="flex h-fit w-full justify-end">
<Button
className="mt-3"
onClick={handleClick}
type="submit"
id="checkAndSaveBtn"
disabled={readonly}
>
Check & Save
</Button>
</div>
</div>
<div className="flex h-fit w-full justify-end">
<Button
className="mt-3"
onClick={handleClick}
type="submit"
id="checkAndSaveBtn"
disabled={readonly}
>
Check & Save
</Button>
</div>
</div>
</BaseModal.Content>
</BaseModal>
</BaseModal.Content>
</BaseModal>
</div>
);
}

View file

@ -1,4 +1,4 @@
import _ from "lodash";
import _, { cloneDeep } from "lodash";
import { MouseEvent, useCallback, useEffect, useRef, useState } from "react";
import ReactFlow, {
Background,
@ -32,6 +32,7 @@ import {
isValidConnection,
reconnectEdges,
scapeJSONParse,
updateIds,
validateSelection,
} from "../../../../utils/reactflowUtils";
import { getRandomName, isWrappedWithClass } from "../../../../utils/utils";
@ -97,6 +98,56 @@ export default function Page({
useEffect(() => {
const onKeyDown = (event: KeyboardEvent) => {
const selectedNode = nodes.filter((obj) => obj.selected);
if (
selectionMenuVisible &&
(event.ctrlKey || event.metaKey) &&
event.key === "g"
) {
event.preventDefault();
takeSnapshot();
if (validateSelection(lastSelection!, edges).length === 0) {
const clonedNodes = cloneDeep(nodes);
const clonedEdges = cloneDeep(edges);
const clonedSelection = cloneDeep(lastSelection);
updateIds(
{ nodes: clonedNodes, edges: clonedEdges },
clonedSelection!
);
const { newFlow, removedEdges } = generateFlow(
clonedSelection!,
clonedNodes,
clonedEdges,
getRandomName()
);
const newGroupNode = generateNodeFromFlow(newFlow, getNodeId);
const newEdges = reconnectEdges(newGroupNode, removedEdges);
setNodes([
...clonedNodes.filter(
(oldNodes) =>
!clonedSelection?.nodes.some(
(selectionNode) => selectionNode.id === oldNodes.id
)
),
newGroupNode,
]);
setEdges([
...clonedEdges.filter(
(oldEdge) =>
!clonedSelection!.nodes.some(
(selectionNode) =>
selectionNode.id === oldEdge.target ||
selectionNode.id === oldEdge.source
)
),
...newEdges,
]);
} else {
setErrorData({
title: INVALID_SELECTION_ERROR_ALERT,
list: validateSelection(lastSelection!, edges),
});
}
}
if (
(event.ctrlKey || event.metaKey) &&
event.key === "p" &&
@ -201,7 +252,7 @@ export default function Page({
document.removeEventListener("keydown", onKeyDown);
document.removeEventListener("mousemove", handleMouseMove);
};
}, [lastCopiedSelection, lastSelection, takeSnapshot]);
}, [lastCopiedSelection, lastSelection, takeSnapshot, selectionMenuVisible]);
useEffect(() => {
if (reactFlowInstance && currentFlowId) {
@ -437,10 +488,17 @@ export default function Page({
if (
validateSelection(lastSelection!, edges).length === 0
) {
const clonedNodes = cloneDeep(nodes);
const clonedEdges = cloneDeep(edges);
const clonedSelection = cloneDeep(lastSelection);
updateIds(
{ nodes: clonedNodes, edges: clonedEdges },
clonedSelection!
);
const { newFlow, removedEdges } = generateFlow(
lastSelection!,
nodes,
edges,
clonedSelection!,
clonedNodes,
clonedEdges,
getRandomName()
);
const newGroupNode = generateNodeFromFlow(
@ -451,20 +509,20 @@ export default function Page({
newGroupNode,
removedEdges
);
setNodes((oldNodes) => [
...oldNodes.filter(
setNodes([
...clonedNodes.filter(
(oldNodes) =>
!lastSelection?.nodes.some(
!clonedSelection?.nodes.some(
(selectionNode) =>
selectionNode.id === oldNodes.id
)
),
newGroupNode,
]);
setEdges((oldEdges) => [
...oldEdges.filter(
setEdges([
...clonedEdges.filter(
(oldEdge) =>
!lastSelection!.nodes.some(
!clonedSelection!.nodes.some(
(selectionNode) =>
selectionNode.id === oldEdge.target ||
selectionNode.id === oldEdge.source

View file

@ -1,4 +1,4 @@
import { cloneDeep } from "lodash";
import { cloneDeep, zip } from "lodash";
import {
Edge,
EdgeChange,
@ -26,6 +26,7 @@ import {
ChatOutputType,
FlowPoolObjectType,
FlowStoreType,
VertexLayerElementType,
chatInputType,
} from "../types/zustand/flow";
import { buildVertices } from "../utils/buildUtils";
@ -36,6 +37,7 @@ import {
getNodeId,
scapeJSONParse,
scapedJSONStringfy,
updateGroupRecursion,
validateNodes,
} from "../utils/reactflowUtils";
import { getInputsAndOutputs } from "../utils/storeUtils";
@ -221,6 +223,8 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
);
},
paste: (selection, position) => {
function updateGroup() {}
if (
selection.nodes.some((node) => node.data.type === "ChatInput") &&
checkChatInput(get().nodes)
@ -257,6 +261,8 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
let newId = getNodeId(node.data.type);
idsMap[node.id] = newId;
updateGroupRecursion(node, selection.edges);
// Create a new node object
const newNode: NodeType = {
id: newId,
@ -459,9 +465,18 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
// verticesLayers is a list of list of vertices ids, where each list is a layer of vertices
// we want to add a new layer (next_vertices_ids) to the list of layers (verticesLayers)
// and the values of next_vertices_ids to the list of vertices ids (verticesIds)
// const nextVertices will be the zip of vertexBuildData.next_vertices_ids and
// vertexBuildData.top_level_vertices
// the VertexLayerElementType as {id: next_vertices_id, layer: top_level_vertex}
const nextVertices: VertexLayerElementType[] = zip(
vertexBuildData.next_vertices_ids,
vertexBuildData.top_level_vertices
).map(([id, reference]) => ({ id: id!, reference }));
const newLayers = [
...get().verticesBuild!.verticesLayers,
vertexBuildData.next_vertices_ids,
nextVertices,
];
const newIds = [
...get().verticesBuild!.verticesIds,
@ -473,7 +488,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
runId: runId,
});
get().updateBuildStatus(
vertexBuildData.next_vertices_ids,
vertexBuildData.top_level_vertices,
BuildStatus.TO_BUILD
);
}
@ -508,12 +523,19 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
get().setIsBuilding(false);
},
onBuildUpdate: handleBuildUpdate,
onBuildError: (title, list, idList) => {
onBuildError: (title: string, list: string[], elementList) => {
const idList = elementList
.map((element) => element.id)
.filter(Boolean) as string[];
useFlowStore.getState().updateBuildStatus(idList, BuildStatus.BUILT);
setErrorData({ list, title });
get().setIsBuilding(false);
},
onBuildStart: (idList) => {
onBuildStart: (elementList) => {
const idList = elementList
// reference is the id of the vertex or the id of the parent in a group node
.map((element) => element.reference)
.filter(Boolean) as string[];
useFlowStore.getState().updateBuildStatus(idList, BuildStatus.BUILDING);
},
validateNodes: validateSubgraph,
@ -531,7 +553,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
updateVerticesBuild: (
vertices: {
verticesIds: string[];
verticesLayers: string[][];
verticesLayers: VertexLayerElementType[][];
runId: string;
} | null
) => {
@ -562,6 +584,7 @@ const useFlowStore = create<FlowStoreType>((set, get) => ({
},
updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => {
const newFlowBuildStatus = { ...get().flowBuildStatus };
console.log("newFlowBuildStatus", newFlowBuildStatus);
nodeIdList.forEach((id) => {
newFlowBuildStatus[id] = {
status,

View file

@ -1,8 +1,11 @@
import { AxiosError } from "axios";
import { cloneDeep } from "lodash";
import { cloneDeep, debounce } from "lodash";
import { Edge, Node, Viewport, XYPosition } from "reactflow";
import { create } from "zustand";
import { STARTER_FOLDER_NAME } from "../constants/constants";
import {
SAVE_DEBOUNCE_TIME,
STARTER_FOLDER_NAME,
} from "../constants/constants";
import {
deleteFlowFromDatabase,
readFlowsFromDatabase,
@ -92,22 +95,18 @@ const useFlowsManagerStore = create<FlowsManagerStoreType>((set, get) => ({
});
});
},
autoSaveCurrentFlow: (nodes: Node[], edges: Edge[], viewport: Viewport) => {
// Clear the previous timeout if it exists.
if (saveTimeoutId) {
clearTimeout(saveTimeoutId);
}
set({ saveLoading: true });
// Set up a new timeout.
saveTimeoutId = setTimeout(() => {
autoSaveCurrentFlow: debounce(
(nodes: Node[], edges: Edge[], viewport: Viewport) => {
set({ saveLoading: true });
if (get().currentFlow) {
get().saveFlow(
{ ...get().currentFlow!, data: { nodes, edges, viewport } },
true
);
}
}, 500); // Delay of 500ms because chat message depends on it.
},
},
SAVE_DEBOUNCE_TIME
),
saveFlow: (flow: FlowType, silent?: boolean) => {
set({ saveLoading: true });
return new Promise<void>((resolve, reject) => {

View file

@ -146,6 +146,7 @@ export type VertexBuildTypeAPI = {
id: string;
inactivated_vertices: Array<string> | null;
next_vertices_ids: Array<string>;
top_level_vertices: Array<string>;
run_id: string;
valid: boolean;
params: string;

View file

@ -35,6 +35,11 @@ export type FlowPoolObjectType = {
buildId: string;
};
export type VertexLayerElementType = {
id: string;
reference?: string;
};
export type FlowPoolType = {
[key: string]: Array<FlowPoolObjectType>;
};
@ -103,7 +108,7 @@ export type FlowStoreType = {
updateVerticesBuild: (
vertices: {
verticesIds: string[];
verticesLayers: string[][];
verticesLayers: VertexLayerElementType[][];
runId: string;
} | null
) => void;
@ -111,7 +116,7 @@ export type FlowStoreType = {
removeFromVerticesBuild: (vertices: string[]) => void;
verticesBuild: {
verticesIds: string[];
verticesLayers: string[][];
verticesLayers: VertexLayerElementType[][];
runId: string;
} | null;
updateBuildStatus: (nodeId: string[], status: BuildStatus) => void;

View file

@ -4,6 +4,7 @@ import { getVerticesOrder, postBuildVertex } from "../controllers/API";
import useAlertStore from "../stores/alertStore";
import useFlowStore from "../stores/flowStore";
import { VertexBuildTypeAPI } from "../types/api";
import { VertexLayerElementType } from "../types/zustand/flow";
type BuildVerticesParams = {
flowId: string; // Assuming FlowType is the type for your flow
@ -17,8 +18,8 @@ type BuildVerticesParams = {
buildId: string
) => void; // Replace any with the actual type if it's not any
onBuildComplete?: (allNodesValid: boolean) => void;
onBuildError?: (title, list, idList: string[]) => void;
onBuildStart?: (idList: string[]) => void;
onBuildError?: (title, list, idList: VertexLayerElementType[]) => void;
onBuildStart?: (idList: VertexLayerElementType[]) => void;
validateNodes?: (nodes: string[]) => void;
};
@ -35,6 +36,7 @@ function getInactiveVertexData(vertexId: string): VertexBuildTypeAPI {
inactivated_vertices: null,
run_id: "",
next_vertices_ids: [],
top_level_vertices: [],
inactive_vertices: null,
valid: false,
timestamp: new Date().toISOString(),
@ -48,7 +50,7 @@ export async function updateVerticesOrder(
startNodeId?: string | null,
stopNodeId?: string | null
): Promise<{
verticesLayers: string[][];
verticesLayers: VertexLayerElementType[][];
verticesIds: string[];
runId: string;
}> {
@ -66,7 +68,14 @@ export async function updateVerticesOrder(
useFlowStore.getState().setIsBuilding(false);
throw new Error("Invalid nodes");
}
let verticesLayers: Array<Array<string>> = [orderResponse.data.ids];
// orderResponse.data.ids,
// for each id we need to build the VertexLayerElementType object as
// {id: id, reference: id}
let verticesLayers: Array<Array<VertexLayerElementType>> =
orderResponse.data.ids.map((id: string) => {
return [{ id: id, reference: id }];
});
const runId = orderResponse.data.run_id;
// if (nodeId) {
// for (let i = 0; i < verticesOrder.length; i += 1) {
@ -160,17 +169,17 @@ export async function buildVertices({
if (onBuildStart) onBuildStart(currentLayer);
// Build each vertex in the current layer
await Promise.all(
currentLayer.map(async (vertexId) => {
currentLayer.map(async (element) => {
// Check if id is in the list of inactive nodes
if (
!useFlowStore
.getState()
.verticesBuild?.verticesIds.includes(vertexId) &&
.verticesBuild?.verticesIds.includes(element.id) &&
onBuildUpdate
) {
// If it is, skip building and set the state to inactive
onBuildUpdate(
getInactiveVertexData(vertexId),
getInactiveVertexData(element.id),
BuildStatus.INACTIVE,
runId
);
@ -181,7 +190,7 @@ export async function buildVertices({
// Build the vertex
await buildVertex({
flowId,
id: vertexId,
id: element.id,
input_value,
onBuildUpdate: (data: VertexBuildTypeAPI, status: BuildStatus) => {
if (onBuildUpdate) onBuildUpdate(data, status, runId);
@ -226,7 +235,7 @@ async function buildVertex({
id: string;
input_value: string;
onBuildUpdate?: (data: any, status: BuildStatus) => void;
onBuildError?: (title, list, idList: string[]) => void;
onBuildError?: (title, list, idList: VertexLayerElementType[]) => void;
verticesIds: string[];
buildResults: boolean[];
stopBuild: () => void;
@ -240,7 +249,7 @@ async function buildVertex({
onBuildError!(
"Error Building Component",
[buildData.params],
verticesIds
verticesIds.map((id) => ({ id }))
);
stopBuild();
}
@ -251,7 +260,7 @@ async function buildVertex({
onBuildError!(
"Error Building Component",
[(error as AxiosError<any>).response?.data?.detail ?? "Unknown Error"],
verticesIds
verticesIds.map((id) => ({ id }))
);
stopBuild();
}

View file

@ -204,23 +204,33 @@ export const processDataFromFlow = (flow: FlowType, refreshIds = true) => {
return data;
};
export function updateIds(newFlow: ReactFlowJsonObject) {
export function updateIds(
{ edges, nodes }: { edges: Edge[]; nodes: Node[] },
selection?: { edges: Edge[]; nodes: Node[] }
) {
let idsMap = {};
if (newFlow.nodes)
newFlow.nodes.forEach((node: NodeType) => {
const selectionIds = selection?.nodes.map((n) => n.id);
if (nodes) {
nodes.forEach((node: NodeType) => {
// Generate a unique node ID
let newId = getNodeId(
node.data.node?.flow ? "GroupNode" : node.data.type
);
let newId = getNodeId(node.data.type);
if (selection && !selectionIds?.includes(node.id)) {
newId = node.id;
}
idsMap[node.id] = newId;
node.id = newId;
node.data.id = newId;
// Add the new node to the list of nodes in state
});
if (newFlow.edges)
newFlow.edges.forEach((edge: Edge) => {
selection?.nodes.forEach((sNode: NodeType) => {
let newId = idsMap[sNode.id];
sNode.id = newId;
sNode.data.id = newId;
});
}
const concatedEdges = [...edges, ...(selection?.edges ?? [])];
if (concatedEdges)
concatedEdges.forEach((edge: Edge) => {
edge.source = idsMap[edge.source];
edge.target = idsMap[edge.target];
const sourceHandleObject: sourceHandleType = scapeJSONParse(
@ -273,6 +283,8 @@ export function validateNode(node: NodeType, edges: Edge[]): Array<string> {
node: { template },
} = node.data;
const displayName = node.data.node.display_name;
return Object.keys(template).reduce((errors: Array<string>, t) => {
if (
template[t].required &&
@ -288,7 +300,9 @@ export function validateNode(node: NodeType, edges: Edge[]): Array<string> {
node.id
)
) {
errors.push(`${type} is missing ${getFieldTitle(template, t)}.`);
errors.push(
`${displayName || type} is missing ${getFieldTitle(template, t)}.`
);
} else if (
template[t].type === "dict" &&
template[t].required &&
@ -633,8 +647,8 @@ export function generateFlow(
}
export function reconnectEdges(groupNode: NodeType, excludedEdges: Edge[]) {
let newEdges = cloneDeep(excludedEdges);
if (!groupNode.data.node!.flow) return [];
let newEdges = cloneDeep(excludedEdges);
const { nodes, edges } = groupNode.data.node!.flow!.data!;
const lastNode = findLastNode(groupNode.data.node!.flow!.data!);
newEdges.forEach((edge) => {
@ -951,7 +965,7 @@ export function connectedInputNodesOnHandle(
return connectedNodes;
}
function updateProxyIdsOnTemplate(
export function updateProxyIdsOnTemplate(
template: APITemplateType,
idsMap: { [key: string]: string }
) {
@ -962,12 +976,16 @@ function updateProxyIdsOnTemplate(
});
}
function updateEdgesIds(edges: Edge[], idsMap: { [key: string]: string }) {
export function updateEdgesIds(
edges: Edge[],
idsMap: { [key: string]: string }
) {
edges.forEach((edge) => {
let targetHandle: targetHandleType = edge.data.targetHandle;
if (targetHandle.proxy && idsMap[targetHandle.proxy!.id]) {
targetHandle.proxy!.id = idsMap[targetHandle.proxy!.id];
}
console.log("edge", edge);
edge.data.targetHandle = targetHandle;
edge.targetHandle = scapedJSONStringfy(targetHandle);
});
@ -1255,3 +1273,18 @@ export function isInputType(type: string): boolean {
export function isOutputType(type: string): boolean {
return OUTPUT_TYPES.has(type);
}
export function updateGroupRecursion(groupNode: NodeType, edges: Edge[]) {
if (groupNode.data.node?.flow) {
groupNode.data.node.flow.data!.nodes.forEach((node) => {
if (node.data.node?.flow) {
updateGroupRecursion(node, node.data.node.flow.data!.edges);
}
});
let newFlow = groupNode.data.node!.flow;
const idsMap = updateIds(newFlow.data!);
updateProxyIdsOnTemplate(groupNode.data.node!.template, idsMap);
let flowEdges = edges;
updateEdgesIds(flowEdges, idsMap);
}
}

View file

@ -705,3 +705,8 @@ export function sortFields(a, b, fieldOrder) {
// You might want to sort them alphabetically or in another specific manner
return a.localeCompare(b);
}
export function freezeObject(obj: any) {
if (!obj) return obj;
return JSON.parse(JSON.stringify(obj));
}

View file

@ -30,6 +30,7 @@ if TYPE_CHECKING:
def pytest_configure():
pytest.BASIC_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "basic_example.json"
pytest.COMPLEX_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "complex_example.json"
pytest.COMPLEX_DEPS_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "complex_deps_example.json"
pytest.OPENAPI_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "Openapi.json"
pytest.GROUPED_CHAT_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "grouped_chat.json"
pytest.ONE_GROUPED_CHAT_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "one_group_chat.json"
@ -192,6 +193,16 @@ def json_vector_store():
return f.read()
@pytest.fixture
def complex_graph_with_groups():
with open(pytest.COMPLEX_DEPS_EXAMPLE_PATH, "r") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
nodes = data_graph["nodes"]
edges = data_graph["edges"]
return Graph(nodes, edges)
@pytest.fixture(name="client", autouse=True)
def client_fixture(session: Session, monkeypatch):
# Set the database url to a test database

View file

@ -430,3 +430,15 @@ async def test_pickle_each_vertex(json_vector_store):
assert pickled is not None
unpickled = pickle.loads(pickled)
assert unpickled is not None
@pytest.mark.asyncio
async def test_build_ordering(complex_graph_with_groups):
sorted_vertices = complex_graph_with_groups.sort_vertices(stop_component_id="ChatInput-Ay8QQ")
assert sorted_vertices == [
"ChatInput-Ay8QQ",
"RecordsAsText-vkx2A",
"FileLoader-Vo1Cq",
]
sorted_vertices = complex_graph_with_groups.sort_vertices()