diff --git a/.gitignore b/.gitignore index c1decc34c..80d5837cd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +# Prevent the OpenCommit prepare-commit-msg hook from being committed +prepare-commit-msg # Logs logs *.log diff --git a/src/backend/langflow/api/v1/base.py b/src/backend/langflow/api/v1/base.py index 6941bedf3..d595210bb 100644 --- a/src/backend/langflow/api/v1/base.py +++ b/src/backend/langflow/api/v1/base.py @@ -1,6 +1,7 @@ from pydantic import BaseModel, validator from langflow.interface.utils import extract_input_variables_from_prompt +from langchain.prompts import PromptTemplate class CacheResponse(BaseModel): @@ -57,6 +58,13 @@ def validate_prompt(template: str): # Check if there are invalid characters in the input_variables input_variables = check_input_variables(input_variables) + try: + PromptTemplate(template=template, input_variables=input_variables) + except Exception as exc: + raise ValueError(str(exc)) from exc + + # if len(input_variables) > 1: + # # If there's more than one input variable return PromptValidationResponse(input_variables=input_variables) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index be9d6802c..666c5d3cd 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -120,7 +120,15 @@ async def stream_build(flow_id: str): yield str(StreamData(event="message", data=response)) - chat_manager.set_cache(flow_id, graph.build()) + langchain_object = graph.build() + # Check the input_keys so they can be sent to the client + if hasattr(langchain_object, "input_keys"): + input_keys_response = { + "input_keys": langchain_object.input_keys, + } + yield str(StreamData(event="input_keys", data=input_keys_response)) + + chat_manager.set_cache(flow_id, langchain_object) except Exception as exc: logger.error("Error while building the flow: %s", exc) yield str(StreamData(event="error", data={"error": str(exc)}))