From 31e02fe25c4e5a1db66ea45a3f1a12321aff9d60 Mon Sep 17 00:00:00 2001 From: Gabriel Luiz Freitas Almeida Date: Wed, 9 Aug 2023 21:37:11 -0300 Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20feat(ConversationalAgent.py):=20add?= =?UTF-8?q?=20ConversationalAgent=20component=20to=20handle=20conversation?= =?UTF-8?q?al=20interactions=20using=20OpenAI's=20function=20calling=20API?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds a new file `ConversationalAgent.py` to the `src/backend/langflow/components/agents` directory. The `ConversationalAgent` class is a custom component that represents a conversational agent capable of using OpenAI's function calling API. The `ConversationalAgent` class has the following features: - It inherits from the `CustomComponent` class. - It has a `display_name` attribute set to "OpenAI Conversational Agent". - It has a `description` attribute set to "Conversational Agent that can use OpenAI's function calling API". - It implements the `build_config` method to define the configuration options for the agent. - It implements the `build` method to create an instance of the `AgentExecutor` class, which represents the agent's execution environment. - The `build` method takes several parameters, including `model_name`, `tools`, `memory`, `system_message`, and `max_token_limit`. - It uses the `ChatOpenAI` class from the `langchain.chat_models` module to create an instance of the OpenAI language model. - It uses the `ConversationTokenBufferMemory` class from the `langchain.memory.token_buffer` module to handle conversation history and token buffering. - It uses the `OpenAIFunctionsAgent` class from the `langchain.agents.openai_functions_agent.base` module to create an instance of the OpenAI functions agent. - It returns an instance of the `AgentExecutor` class with the agent, tools, memory, verbose, and return_intermediate_steps parameters set. 
from typing import Optional

from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import (
    _get_default_system_message,
)
from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
from langchain.chat_models import ChatOpenAI
from langchain.memory.token_buffer import ConversationTokenBufferMemory
from langchain.prompts import SystemMessagePromptTemplate
from langchain.prompts.chat import MessagesPlaceholder
from langchain.schema.memory import BaseMemory
from langchain.tools import Tool

from langflow import CustomComponent


class ConversationalAgent(CustomComponent):
    """Langflow component wrapping an OpenAI function-calling conversational agent.

    Builds an ``AgentExecutor`` that pairs a ``ChatOpenAI`` model with an
    ``OpenAIFunctionsAgent`` and a token-buffered conversation memory.
    """

    # NOTE(review): fixed typo in the user-facing label ("OpenaAI" -> "OpenAI").
    display_name: str = "OpenAI Conversational Agent"
    description: str = "Conversational Agent that can use OpenAI's function calling API"

    def build_config(self):
        """Return the UI field configuration for this component."""
        # Only these model snapshots support OpenAI's function-calling API.
        openai_function_models = [
            "gpt-3.5-turbo-0613",
            "gpt-3.5-turbo-16k-0613",
            "gpt-4-0613",
            "gpt-4-32k-0613",
        ]
        return {
            # "tools" is rendered as a list input in the UI.
            "tools": {"is_list": True},
            "model_name": {
                "display_name": "Model Name",
                "options": openai_function_models,
                "value": openai_function_models[0],
            },
            # Hide the raw component code field from the UI.
            "code": {"show": False},
        }

    def build(
        self,
        model_name: str,
        # NOTE(review): annotation kept as `Tool` because langflow derives the UI
        # field type from it; at runtime this receives a list of tools
        # (see "is_list": True in build_config).
        tools: Tool,
        memory: Optional[BaseMemory] = None,
        system_message: Optional[SystemMessagePromptTemplate] = None,
        max_token_limit: int = 2000,
    ) -> AgentExecutor:
        """Build the agent executor.

        Args:
            model_name: OpenAI chat model to use (one of the function-calling
                snapshots listed in ``build_config``).
            tools: Tools the agent may call.
            memory: Optional conversation memory; when omitted, a
                ``ConversationTokenBufferMemory`` keyed on ``"chat_history"``
                is created.
            system_message: Optional system prompt; defaults to langchain's
                conversational-retrieval default system message.
            max_token_limit: Token budget for the auto-created buffer memory.

        Returns:
            An ``AgentExecutor`` with intermediate-step reporting enabled.
        """
        llm = ChatOpenAI(model_name=model_name)
        if not memory:
            memory_key = "chat_history"
            memory = ConversationTokenBufferMemory(
                memory_key=memory_key,
                return_messages=True,
                output_key="output",
                llm=llm,
                max_token_limit=max_token_limit,
            )
        else:
            # BaseMemory does not guarantee a `memory_key` attribute; fall back
            # to the conventional key instead of raising AttributeError for
            # custom memory implementations.
            memory_key = getattr(memory, "memory_key", "chat_history")

        _system_message = system_message or _get_default_system_message()
        # The MessagesPlaceholder must use the same key the memory writes to,
        # so prior turns are injected into the prompt.
        prompt = OpenAIFunctionsAgent.create_prompt(
            system_message=_system_message,
            extra_prompt_messages=[MessagesPlaceholder(variable_name=memory_key)],
        )
        agent = OpenAIFunctionsAgent(llm=llm, tools=tools, prompt=prompt)
        return AgentExecutor(
            agent=agent,
            tools=tools,
            memory=memory,
            verbose=True,
            return_intermediate_steps=True,
        )