diff --git a/src/backend/langflow/api/v1/callback.py b/src/backend/langflow/api/v1/callback.py
index d465f8684..d39c58c65 100644
--- a/src/backend/langflow/api/v1/callback.py
+++ b/src/backend/langflow/api/v1/callback.py
@@ -11,6 +11,7 @@ from fastapi import WebSocket
 from langchain.schema import AgentAction, LLMResult, AgentFinish
 
 from langflow.utils.logger import logger
+from langflow.utils.util import remove_ansi_escape_codes
 
 
 # https://github.com/hwchase17/chat-langchain/blob/master/callback.py
@@ -105,7 +106,8 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
         # to the LLM, adding it will send the final prompt
         # to the frontend
         if "Prompt after formatting" in text:
-            text = text.replace("Prompt after formatting", "")
+            text = text.replace("Prompt after formatting:\n", "")
+            text = remove_ansi_escape_codes(text)
         resp = ChatResponse(
             message="",
             type="stream",
diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py
index f68c9dbe2..6b8cddd69 100644
--- a/src/backend/langflow/utils/util.py
+++ b/src/backend/langflow/utils/util.py
@@ -12,6 +12,10 @@ from langflow.utils.logger import logger
 from multiprocess import cpu_count  # type: ignore
 
 
+def remove_ansi_escape_codes(text):
+    return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text)
+
+
 def build_template_from_function(
     name: str, type_to_loader_dict: Dict, add_function: bool = False
 ):