From 3e66c5a5ce478340a2e19d27529a25b47b6c1fae Mon Sep 17 00:00:00 2001 From: Gabriel Almeida Date: Mon, 3 Apr 2023 10:11:53 -0300 Subject: [PATCH] feat: adding intermediate steps processing This was required because if the user uses log-level debug the logs show up in the chat --- .../langflow/interface/agents/custom.py | 2 +- src/backend/langflow/interface/run.py | 39 +++++++++++++++---- src/backend/langflow/utils/logger.py | 12 ++++-- 3 files changed, 42 insertions(+), 11 deletions(-) diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/langflow/interface/agents/custom.py index e5ae77743..593dc7f3e 100644 --- a/src/backend/langflow/interface/agents/custom.py +++ b/src/backend/langflow/interface/agents/custom.py @@ -109,7 +109,7 @@ class InitializeAgent(AgentExecutor): def initialize( cls, llm: BaseLLM, tools: List[Tool], agent: str, memory: BaseChatMemory ): - return initialize_agent(tools=tools, llm=llm, agent=agent, memory=memory) + return initialize_agent(tools=tools, llm=llm, agent=agent, memory=memory, return_intermediate_steps=True) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 34259d7ad..803d69ae1 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -1,7 +1,7 @@ import contextlib import io import re -from typing import Any, Dict +from typing import Any, Dict, List, Tuple from langflow.cache.utils import compute_hash, load_cache, save_cache from langflow.graph.graph import Graph @@ -109,15 +109,40 @@ def get_result_and_thought(extracted_json: Dict[str, Any], message: str): config=extracted_json ) with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer): - result = loaded_langchain(message) - result = ( - result.get(loaded_langchain.output_keys[0]) if isinstance(result, dict) else result + output = loaded_langchain(message) + intermediate_steps = ( 
+ output.get("intermediate_steps", []) if isinstance(output, dict) else [] ) - thought = output_buffer.getvalue() + result = ( + output.get(loaded_langchain.output_keys[0]) + if isinstance(output, dict) + else output + ) + + if intermediate_steps: + thought = format_intermediate_steps(intermediate_steps) + else: + thought = output_buffer.getvalue() except Exception as e: result = f"Error: {str(e)}" thought = "" return result, thought + + +def format_intermediate_steps(intermediate_steps): + formatted_chain = "> Entering new AgentExecutor chain...\n" + for step in intermediate_steps: + action = step[0] + observation = step[1] + + formatted_chain += ( + f" {action.log}\nAction: {action.tool}\nAction Input: {action.tool_input}\n" + ) + formatted_chain += f"Observation: {observation}\n" + + final_answer = f"Final Answer: {observation}\n" + formatted_chain += f"Thought: I now know the final answer\n{final_answer}\n" + formatted_chain += "> Finished chain.\n" + + return formatted_chain diff --git a/src/backend/langflow/utils/logger.py b/src/backend/langflow/utils/logger.py index f0db10475..2d04a7706 100644 --- a/src/backend/langflow/utils/logger.py +++ b/src/backend/langflow/utils/logger.py @@ -1,14 +1,20 @@ import logging from pathlib import Path +from rich.logging import RichHandler logger = logging.getLogger("langflow") def configure(log_level: str = "INFO", log_file: Path = None): # type: ignore log_format = "%(asctime)s - %(levelname)s - %(message)s" - log_level = getattr(logging, log_level.upper(), logging.INFO) + log_level_value = getattr(logging, log_level.upper(), logging.INFO) - logging.basicConfig(level=log_level, format=log_format) + logging.basicConfig( + level=log_level_value, + format=log_format, + datefmt="[%X]", + handlers=[RichHandler(rich_tracebacks=True)], + ) if log_file: log_file = Path(log_file) @@ -18,6 +24,6 @@ def configure(log_level: str = "INFO", log_file: Path = None): # type: ignore file_handler.setFormatter(logging.Formatter(log_format)) 
logger.addHandler(file_handler) - logger.info(f"Logger set up with log level: {log_level}") + logger.info(f"Logger set up with log level: {log_level_value}({log_level})") if log_file: logger.info(f"Log file: {log_file}")