From c333f57a0c59949befc66522a469ba85c684639b Mon Sep 17 00:00:00 2001 From: Ibis Prevedello Date: Sun, 5 Mar 2023 23:32:42 -0300 Subject: [PATCH] feat: add thought to response --- langflow/backend/build_and_push | 4 ++-- langflow/backend/endpoints.py | 26 ++++++++++++++++++++------ langflow/frontend/build_and_push | 4 ++-- 3 files changed, 24 insertions(+), 10 deletions(-) diff --git a/langflow/backend/build_and_push b/langflow/backend/build_and_push index c3cec99b6..e65a79317 100755 --- a/langflow/backend/build_and_push +++ b/langflow/backend/build_and_push @@ -3,6 +3,6 @@ poetry remove langchain docker build -t logspace/backend_build -f build.Dockerfile . VERSION=$(toml get --toml-path pyproject.toml tool.poetry.version) -docker build --build-arg VERSION=$VERSION -t logspace/langflow_backend:$VERSION . -# docker push logspace/langflow_backend:$VERSION +docker build --build-arg VERSION=$VERSION -t ibiscp/langflow_backend:$VERSION . +docker push ibiscp/langflow_backend:$VERSION # poetry add --editable ../../../langchain diff --git a/langflow/backend/endpoints.py b/langflow/backend/endpoints.py index a1db8fedd..bf143a0f0 100644 --- a/langflow/backend/endpoints.py +++ b/langflow/backend/endpoints.py @@ -8,6 +8,9 @@ from langchain.llms.loading import load_llm_from_config from langchain.prompts.loading import load_prompt_from_config from typing import Any +import io +import contextlib +import re # build router @@ -77,12 +80,12 @@ def get_load(data: dict[str, Any]): type_list = get_type_list() # Substitute ZeroShotPromt with PromptTemplate - for node in data['nodes']: + for node in data["nodes"]: if node["data"]["type"] == "ZeroShotPrompt": # Build Prompt Template tools = [ tool - for tool in data['nodes'] + for tool in data["nodes"] if tool["type"] != "chatOutputNode" and "Tool" in tool["data"]["node"]["base_classes"] ] @@ -104,17 +107,28 @@ def get_load(data: dict[str, Any]): if extracted_json["_type"] in type_list["agents"]: loaded = 
load_agent_executor_from_config(extracted_json) - return {"result": loaded.run(message)} + with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): + result = loaded.run(message) + thought = output_buffer.getvalue() + elif extracted_json["_type"] in type_list["chains"]: loaded = load_chain_from_config(extracted_json) - return {"result": loaded.run(message)} + with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): + result = loaded.run(message) + thought = output_buffer.getvalue() + elif extracted_json["_type"] in type_list["llms"]: loaded = load_llm_from_config(extracted_json) - return {"result": loaded(message)} + with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): + result = loaded(message) + thought = output_buffer.getvalue() else: - return {"result": "Error: Type should be either agent, chain or llm"} + result = "Error: Type should be either agent, chain or llm" + thought = "" + + return {"result": result, "thought": re.sub(r'\x1b\[([0-9,A-Z]{1,2}(;[0-9,A-Z]{1,2})?)?[mK]', '', thought).strip()} def build_prompt_template(prompt, tools): diff --git a/langflow/frontend/build_and_push b/langflow/frontend/build_and_push index 338777e52..66c11df94 100755 --- a/langflow/frontend/build_and_push +++ b/langflow/frontend/build_and_push @@ -2,5 +2,5 @@ VERSION="0.1.0" docker build -t logspace/frontend_build -f build.Dockerfile . -docker build --build-arg VERSION=$VERSION -t logspace/langflow_frontend:$VERSION . -# docker push logspace/langflow_frontend:$VERSION +docker build --build-arg VERSION=$VERSION -t ibiscp/langflow_frontend:$VERSION . +docker push ibiscp/langflow_frontend:$VERSION