From e4646b1e5fc11a0e70d2934b4cf321c427be37e2 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Wed, 23 Aug 2023 10:05:56 -0300
Subject: [PATCH] =?UTF-8?q?=F0=9F=90=9B=20fix(callback.py):=20remove=20"Pr?=
 =?UTF-8?q?ompt=20after=20formatting:\n"=20from=20text=20to=20improve=20fo?=
 =?UTF-8?q?rmatting=20=F0=9F=90=9B=20fix(util.py):=20add=20remove=5Fansi?=
 =?UTF-8?q?=5Fescape=5Fcodes=20function=20to=20remove=20ANSI=20escape=20co?=
 =?UTF-8?q?des=20from=20text?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/backend/langflow/api/v1/callback.py | 4 +++-
 src/backend/langflow/utils/util.py      | 4 ++++
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/src/backend/langflow/api/v1/callback.py b/src/backend/langflow/api/v1/callback.py
index d465f8684..d39c58c65 100644
--- a/src/backend/langflow/api/v1/callback.py
+++ b/src/backend/langflow/api/v1/callback.py
@@ -11,6 +11,7 @@ from fastapi import WebSocket
 from langchain.schema import AgentAction, LLMResult, AgentFinish
 
 from langflow.utils.logger import logger
+from langflow.utils.util import remove_ansi_escape_codes
 
 # https://github.com/hwchase17/chat-langchain/blob/master/callback.py
 
@@ -105,7 +106,8 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
         # to the LLM, adding it will send the final prompt
         # to the frontend
         if "Prompt after formatting" in text:
-            text = text.replace("Prompt after formatting", "")
+            text = text.replace("Prompt after formatting:\n", "")
+            text = remove_ansi_escape_codes(text)
             resp = ChatResponse(
                 message="",
                 type="stream",
diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py
index f68c9dbe2..6b8cddd69 100644
--- a/src/backend/langflow/utils/util.py
+++ b/src/backend/langflow/utils/util.py
@@ -12,6 +12,10 @@ from langflow.utils.logger import logger
 from multiprocess import cpu_count  # type: ignore
 
 
+def remove_ansi_escape_codes(text):
+    return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text)
+
+
 def build_template_from_function(
     name: str, type_to_loader_dict: Dict, add_function: bool = False
 ):