diff --git a/src/backend/langflow/api/utils.py b/src/backend/langflow/api/utils.py
index bfd9e3da5..cd7daa5d2 100644
--- a/src/backend/langflow/api/utils.py
+++ b/src/backend/langflow/api/utils.py
@@ -22,3 +22,26 @@ def remove_api_keys(flow: dict):
                 value["value"] = None
     return flow
+
+
+def build_input_keys_response(langchain_object):
+    """Build the input keys response."""
+    input_keys_response = {
+        "input_keys": langchain_object.input_keys,
+        "memory_keys": [],
+    }
+    # If the object has memory, that memory will have a memory_variables attribute
+    # memory variables should be removed from the input keys
+    if hasattr(langchain_object, "memory") and hasattr(
+        langchain_object.memory, "memory_variables"
+    ):
+        # Remove memory variables from input keys
+        input_keys_response["input_keys"] = [
+            key
+            for key in input_keys_response["input_keys"]
+            if key not in langchain_object.memory.memory_variables
+        ]
+        # Add memory variables to memory_keys
+        input_keys_response["memory_keys"] = langchain_object.memory.memory_variables
+
+    return input_keys_response
diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py
index 666c5d3cd..5054bf107 100644
--- a/src/backend/langflow/api/v1/chat.py
+++ b/src/backend/langflow/api/v1/chat.py
@@ -6,6 +6,7 @@ from fastapi import (
     status,
 )
 from fastapi.responses import StreamingResponse
+from langflow.api.utils import build_input_keys_response
 from langflow.api.v1.schemas import BuiltResponse, InitResponse, StreamData
 from langflow.chat.manager import ChatManager
@@ -123,9 +124,7 @@ async def stream_build(flow_id: str):
             langchain_object = graph.build()
             # Now we need to check the input_keys to send them to the client
             if hasattr(langchain_object, "input_keys"):
-                input_keys_response = {
-                    "input_keys": langchain_object.input_keys,
-                }
+                input_keys_response = build_input_keys_response(langchain_object)
                 yield str(StreamData(event="input_keys", data=input_keys_response))
                 chat_manager.set_cache(flow_id,
                                        langchain_object)