diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py
index a3efe2b0c..3ce5da615 100644
--- a/src/backend/langflow/interface/run.py
+++ b/src/backend/langflow/interface/run.py
@@ -14,6 +14,23 @@ def build_langchain_object_with_caching(data_graph):
     return graph.build()
 
 
+@memoize_dict(maxsize=10)
+def build_sorted_vertices_with_caching(data_graph):
+    """
+    Build langchain object from data_graph and collect vertex artifacts.
+    """
+
+    logger.debug("Building langchain object")
+    graph = Graph.from_payload(data_graph)
+    sorted_vertices = graph.topological_sort()
+    artifacts = {}
+    for vertex in sorted_vertices:
+        vertex.build()
+        if vertex.artifacts:
+            artifacts.update(vertex.artifacts)
+    return graph.build(), artifacts
+
+
 def build_langchain_object(data_graph):
     """
     Build langchain object from data_graph.
diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py
index abf7a00b8..3ccb3a8b1 100644
--- a/src/backend/langflow/processing/process.py
+++ b/src/backend/langflow/processing/process.py
@@ -2,7 +2,7 @@ from pathlib import Path
 from langchain.schema import AgentAction
 import json
 from langflow.interface.run import (
-    build_langchain_object_with_caching,
+    build_sorted_vertices_with_caching,
     get_memory_key,
     update_memory_keys,
 )
@@ -88,8 +88,16 @@ def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = None):
     with PromptTemplate,then run the graph and return the result and thought.
     """
     # Load langchain object
-    langchain_object = build_langchain_object_with_caching(data_graph)
+    langchain_object, artifacts = build_sorted_vertices_with_caching(data_graph)
     logger.debug("Loaded LangChain object")
+    if inputs is None:
+        inputs = {}
+    for (
+        key,
+        value,
+    ) in artifacts.items():
+        if key not in inputs or not inputs[key]:
+            inputs[key] = value
 
     if langchain_object is None:
         # Raise user facing error
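
Note on `memoize_dict`: the graph payload is a plain dict and therefore unhashable, so `functools.lru_cache` cannot key the cache on it directly; that is what langflow's `memoize_dict` decorator is for. Below is only a minimal sketch of what such a decorator could look like (an LRU cache keyed on a hash of the serialized payload), not langflow's actual implementation:

```python
from collections import OrderedDict
from functools import wraps
import hashlib
import json


def memoize_dict(maxsize: int = 10):
    """Illustrative LRU memoizer keyed on a dict payload (sketch, not langflow's code)."""

    def decorator(func):
        cache: OrderedDict = OrderedDict()

        @wraps(func)
        def wrapper(data_graph: dict):
            # Dicts are unhashable, so derive a stable key from the serialized payload.
            key = hashlib.sha256(
                json.dumps(data_graph, sort_keys=True).encode()
            ).hexdigest()
            if key in cache:
                cache.move_to_end(key)  # mark as most recently used
                return cache[key]
            result = func(data_graph)
            cache[key] = result
            if len(cache) > maxsize:
                cache.popitem(last=False)  # evict the least recently used entry
            return result

        return wrapper

    return decorator
```

In `process_graph_cached`, the artifacts collected while building the vertices are merged into `inputs` only when the caller did not supply a value (the key is missing or falsy), so explicit user-provided inputs still take precedence over artifact values.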