diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py
index 456d6e483..ddd9dc09b 100644
--- a/src/backend/langflow/api/v1/endpoints.py
+++ b/src/backend/langflow/api/v1/endpoints.py
@@ -91,10 +91,10 @@ async def process_flow(
                 graph_data = process_tweaks(graph_data, tweaks)
             except Exception as exc:
                 logger.error(f"Error processing tweaks: {exc}")
-        response = process_graph_cached(graph_data, inputs, clear_cache, session_id)
-        return ProcessResponse(
-            result=response,
+        response, session_id = process_graph_cached(
+            graph_data, inputs, clear_cache, session_id
         )
+        return ProcessResponse(result=response, session_id=session_id)
     except Exception as e:
         # Log stack trace
         logger.exception(e)
diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py
index 0148dac6d..c5539c048 100644
--- a/src/backend/langflow/api/v1/schemas.py
+++ b/src/backend/langflow/api/v1/schemas.py
@@ -47,6 +47,7 @@ class ProcessResponse(BaseModel):
     """Process response schema."""
 
     result: dict
+    session_id: str = None
 
 
 class ChatMessage(BaseModel):
diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py
index 6d72a736b..5549a2ae1 100644
--- a/src/backend/langflow/processing/process.py
+++ b/src/backend/langflow/processing/process.py
@@ -117,6 +117,7 @@ def process_graph_cached(
         logger.debug("Cleared cache")
 
     langchain_object, artifacts = get_build_result(data_graph, session_id)
+    session_id = build_sorted_vertices_with_caching.hash
     logger.debug("Loaded LangChain object")
 
     if inputs is None:
@@ -151,7 +152,7 @@ def process_graph_cached(
         raise ValueError(
             f"Unknown langchain_object type: {type(langchain_object).__name__}"
         )
-    return result
+    return result, session_id
 
 
 def load_flow_from_json(