🔨 refactor(utils.py): extract build_input_keys_response function to improve code reusability

🚀 feat(chat.py): use build_input_keys_response function to build input keys response
The build_input_keys_response function was extracted from the remove_api_keys function to improve code reusability. It builds the input keys response object, which contains the input keys and memory keys of a langchain object. The chat.py file now uses this function to build the input keys response object instead of building it manually. This improves code readability and reduces code duplication.
This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-06-26 10:52:53 -03:00
commit 89286c6456
2 changed files with 25 additions and 3 deletions

View file

@@ -22,3 +22,26 @@ def remove_api_keys(flow: dict):
value["value"] = None
return flow
def build_input_keys_response(langchain_object):
    """Build the input keys response for a langchain object.

    Returns a dict with:
      - "input_keys": keys the caller must supply (memory-provided keys removed)
      - "memory_keys": variables supplied by the object's memory, if any
    """
    memory = getattr(langchain_object, "memory", None)
    if memory is None or not hasattr(memory, "memory_variables"):
        # No memory attached: every input key must come from the caller.
        return {
            "input_keys": langchain_object.input_keys,
            "memory_keys": [],
        }
    # Memory supplies some variables itself, so they are excluded from the
    # keys the caller has to provide and reported separately instead.
    caller_keys = [
        key
        for key in langchain_object.input_keys
        if key not in memory.memory_variables
    ]
    return {
        "input_keys": caller_keys,
        "memory_keys": memory.memory_variables,
    }

View file

@@ -6,6 +6,7 @@ from fastapi import (
status,
)
from fastapi.responses import StreamingResponse
from langflow.api.utils import build_input_keys_response
from langflow.api.v1.schemas import BuiltResponse, InitResponse, StreamData
from langflow.chat.manager import ChatManager
@@ -123,9 +124,7 @@ async def stream_build(flow_id: str):
langchain_object = graph.build()
# Now we need to check the input_keys to send them to the client
if hasattr(langchain_object, "input_keys"):
input_keys_response = {
"input_keys": langchain_object.input_keys,
}
input_keys_response = build_input_keys_response(langchain_object)
yield str(StreamData(event="input_keys", data=input_keys_response))
chat_manager.set_cache(flow_id, langchain_object)