feat: add intermediate-steps processing

This was required because, when the user sets the log level to debug, the logs show up in the chat.
This commit is contained in:
Gabriel Almeida 2023-04-03 10:11:53 -03:00
commit 3e66c5a5ce
3 changed files with 42 additions and 11 deletions

View file

@@ -109,7 +109,7 @@ class InitializeAgent(AgentExecutor):
def initialize(
    cls, llm: BaseLLM, tools: List[Tool], agent: str, memory: BaseChatMemory
):
    """Build a LangChain agent executor for the given LLM, tools, and memory.

    ``return_intermediate_steps=True`` makes the executor include the
    (action, observation) pairs in its output dict, so the UI can render
    the agent's chain of thought instead of scraping stdout logs.
    """
    # NOTE(review): the diff residue showed both the old call and a new call
    # missing its closing parenthesis; this is the intended post-change form.
    return initialize_agent(
        tools=tools,
        llm=llm,
        agent=agent,
        memory=memory,
        return_intermediate_steps=True,
    )

def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)

View file

@@ -1,7 +1,7 @@
import contextlib
import io
import re
from typing import Any, Dict
from typing import Any, Dict, List, Tuple
from langflow.cache.utils import compute_hash, load_cache, save_cache
from langflow.graph.graph import Graph
@@ -109,15 +109,40 @@ def get_result_and_thought(extracted_json: Dict[str, Any], message: str):
config=extracted_json
)
with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
result = loaded_langchain(message)
result = (
result.get(loaded_langchain.output_keys[0])
if isinstance(result, dict)
else result
output = loaded_langchain(message)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
thought = output_buffer.getvalue()
result = (
output.get(loaded_langchain.output_keys[0])
if isinstance(output, dict)
else output
)
if intermediate_steps:
thought = format_intermediate_steps(intermediate_steps)
else:
thought = output_buffer.getvalue()
except Exception as e:
result = f"Error: {str(e)}"
thought = ""
return result, thought
def format_intermediate_steps(intermediate_steps):
    """Render an agent run's intermediate steps as a verbose-style transcript.

    Mimics LangChain's ``AgentExecutor`` verbose console output so the UI can
    display the chain of thought.

    Parameters
    ----------
    intermediate_steps : list[tuple]
        Pairs of ``(action, observation)`` where ``action`` exposes ``.log``,
        ``.tool``, and ``.tool_input`` (presumably LangChain ``AgentAction``
        objects — confirmed only by attribute use here).

    Returns
    -------
    str
        The formatted transcript. For an empty list, only the enter/finish
        markers are returned (the original raised NameError on empty input
        because ``observation`` was unbound after the loop).
    """
    formatted_chain = "> Entering new AgentExecutor chain...\n"
    for action, observation in intermediate_steps:
        formatted_chain += (
            f" {action.log}\nAction: {action.tool}\nAction Input: {action.tool_input}\n"
        )
        formatted_chain += f"Observation: {observation}\n"
    # Only emit the final-answer section when at least one step ran; the last
    # step's observation doubles as the final answer in this transcript.
    if intermediate_steps:
        _, observation = intermediate_steps[-1]
        final_answer = f"Final Answer: {observation}\n"
        formatted_chain += f"Thought: I now know the final answer\n{final_answer}\n"
    formatted_chain += "> Finished chain.\n"
    return formatted_chain

View file

@@ -1,14 +1,20 @@
import logging
from pathlib import Path
from rich.logging import RichHandler
logger = logging.getLogger("langflow")
def configure(log_level: str = "INFO", log_file: Path = None): # type: ignore
log_format = "%(asctime)s - %(levelname)s - %(message)s"
log_level = getattr(logging, log_level.upper(), logging.INFO)
log_level_value = getattr(logging, log_level.upper(), logging.INFO)
logging.basicConfig(level=log_level, format=log_format)
logging.basicConfig(
level=log_level_value,
format=log_format,
datefmt="[%X]",
handlers=[RichHandler(rich_tracebacks=True)],
)
if log_file:
log_file = Path(log_file)
@@ -18,6 +24,6 @@ def configure(log_level: str = "INFO", log_file: Path = None):  # type: ignore
file_handler.setFormatter(logging.Formatter(log_format))
logger.addHandler(file_handler)
logger.info(f"Logger set up with log level: {log_level}")
logger.info(f"Logger set up with log level: {log_level_value}({log_level})")
if log_file:
logger.info(f"Log file: {log_file}")