Fixes a bug in fix_memory_inputs, Adds OpenAI ConversationalAgent (#753)

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-08-10 17:50:26 -03:00 committed by GitHub
commit 9bc616672c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 113 additions and 19 deletions

20
poetry.lock generated
View file

@ -253,13 +253,13 @@ test = ["astroid", "pytest"]
[[package]]
name = "async-timeout"
version = "4.0.2"
version = "4.0.3"
description = "Timeout context manager for asyncio programs"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
{file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
{file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
{file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
]
[[package]]
@ -739,13 +739,13 @@ sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
[[package]]
name = "cohere"
version = "4.19.2"
version = "4.19.3"
description = ""
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "cohere-4.19.2-py3-none-any.whl", hash = "sha256:0b6a4fe04380a481a8e975ebcc9bb6433febe4d3eb583b6d6e04342a5e998345"},
{file = "cohere-4.19.2.tar.gz", hash = "sha256:a0b0fa698b3d3983fb328bb90d68fcf08faaa2268f3772ebc6bfea6ba55acf27"},
{file = "cohere-4.19.3-py3-none-any.whl", hash = "sha256:6c98f1e58b93b6316c824385c1d2032ed352280e9efa5695ba98306258abf84f"},
{file = "cohere-4.19.3.tar.gz", hash = "sha256:c3aaa716c4da7d7a8ed68705fcdc92f1b1a2260b737cee6bd27af5c347f31496"},
]
[package.dependencies]
@ -1644,13 +1644,13 @@ six = "*"
[[package]]
name = "google-cloud-aiplatform"
version = "1.29.0"
version = "1.30.0"
description = "Vertex AI API client library"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-cloud-aiplatform-1.29.0.tar.gz", hash = "sha256:fceabb924d2d26057e3c8c5c2e251929389aa6d553361377bc402781150c0db3"},
{file = "google_cloud_aiplatform-1.29.0-py2.py3-none-any.whl", hash = "sha256:cf81c1d93c61ccf3df60a65e3a5a1e465e044059d36b6fc1202b940c46c4c1e1"},
{file = "google-cloud-aiplatform-1.30.0.tar.gz", hash = "sha256:26c16069553d177c3277a4371279871c38e31eab1134906cf83ea905e4203ec4"},
{file = "google_cloud_aiplatform-1.30.0-py2.py3-none-any.whl", hash = "sha256:8a5d47378babb491cf4737e4f1951289b33b9254bed1e3d8c07be4896cae83ce"},
]
[package.dependencies]

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.4.7"
version = "0.4.8"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [

View file

@ -0,0 +1,76 @@
from langflow import CustomComponent
from typing import Optional
from langchain.prompts import SystemMessagePromptTemplate
from langchain.tools import Tool
from langchain.schema.memory import BaseMemory
from langchain.chat_models import ChatOpenAI
from langchain.agents.agent import AgentExecutor
from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
from langchain.memory.token_buffer import ConversationTokenBufferMemory
from langchain.prompts.chat import MessagesPlaceholder
from langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import (
_get_default_system_message,
)
class ConversationalAgent(CustomComponent):
    """Langflow component that builds a conversational agent backed by
    OpenAI's function-calling API.

    Wraps an ``OpenAIFunctionsAgent`` in an ``AgentExecutor`` with a
    token-bounded conversation memory so the agent keeps context across turns.
    """

    # Fixed typo in the UI label: "OpenaAI" -> "OpenAI".
    display_name: str = "OpenAI Conversational Agent"
    description: str = "Conversational Agent that can use OpenAI's function calling API"

    def build_config(self):
        """Describe the fields Langflow renders in the UI for this component."""
        # Only these model snapshots expose the function-calling API.
        openai_function_models = [
            "gpt-3.5-turbo-0613",
            "gpt-3.5-turbo-16k-0613",
            "gpt-4-0613",
            "gpt-4-32k-0613",
        ]
        return {
            # "tools" is marked is_list: the UI passes a list of Tool objects.
            "tools": {"is_list": True, "display_name": "Tools"},
            "memory": {"display_name": "Memory"},
            "system_message": {"display_name": "System Message"},
            "max_token_limit": {"display_name": "Max Token Limit"},
            "model_name": {
                "display_name": "Model Name",
                "options": openai_function_models,
                # Default to the first (smallest/cheapest) function-calling model.
                "value": openai_function_models[0],
            },
            # Hide the raw code editor for this component.
            "code": {"show": False},
        }

    def build(
        self,
        model_name: str,
        # NOTE(review): annotated as Tool but build_config marks it is_list,
        # so at runtime this is a list of Tool — the annotation drives the
        # Langflow UI type system, so it is left as-is.
        tools: Tool,
        memory: Optional[BaseMemory] = None,
        system_message: Optional[SystemMessagePromptTemplate] = None,
        max_token_limit: int = 2000,
    ) -> AgentExecutor:
        """Assemble and return the AgentExecutor.

        Args:
            model_name: OpenAI chat model snapshot to use (must support
                function calling).
            tools: Tools the agent may call.
            memory: Optional pre-built memory; when omitted a
                ``ConversationTokenBufferMemory`` is created.
            system_message: Optional system prompt; defaults to LangChain's
                conversational-retrieval default.
            max_token_limit: Token budget for the auto-created memory buffer.
        """
        llm = ChatOpenAI(model=model_name)
        if not memory:
            # No memory supplied: create a token-bounded buffer whose key
            # matches the MessagesPlaceholder inserted into the prompt below.
            memory_key = "chat_history"
            memory = ConversationTokenBufferMemory(
                memory_key=memory_key,
                return_messages=True,
                output_key="output",
                llm=llm,
                max_token_limit=max_token_limit,
            )
        else:
            # Reuse the caller's memory key so the prompt placeholder and the
            # memory stay in sync.
            memory_key = memory.memory_key  # type: ignore
        _system_message = system_message or _get_default_system_message()
        prompt = OpenAIFunctionsAgent.create_prompt(
            system_message=_system_message,  # type: ignore
            extra_prompt_messages=[MessagesPlaceholder(variable_name=memory_key)],
        )
        agent = OpenAIFunctionsAgent(
            llm=llm, tools=tools, prompt=prompt  # type: ignore
        )
        return AgentExecutor(
            agent=agent,
            tools=tools,  # type: ignore
            memory=memory,
            verbose=True,
            return_intermediate_steps=True,
        )

View file

@ -3,6 +3,10 @@ from typing import Any, Union
from langflow.interface.utils import extract_input_variables_from_prompt
class UnbuiltObject:
pass
def validate_prompt(prompt: str):
"""Validate prompt."""
if extract_input_variables_from_prompt(prompt):

View file

@ -1,4 +1,5 @@
import ast
from langflow.graph.utils import UnbuiltObject
from langflow.interface.initialize import loading
from langflow.interface.listing import ALL_TYPES_DICT
from langflow.utils.constants import DIRECT_TYPES
@ -22,7 +23,7 @@ class Vertex:
self.edges: List["Edge"] = []
self.base_type: Optional[str] = base_type
self._parse_data()
self._built_object = None
self._built_object = UnbuiltObject()
self._built = False
self.artifacts: Dict[str, Any] = {}
@ -245,8 +246,14 @@ class Vertex:
"""
Checks if the built object is None and raises a ValueError if so.
"""
if self._built_object is None:
raise ValueError(f"Node type {self.vertex_type} not found")
if isinstance(self._built_object, UnbuiltObject):
raise ValueError(f"{self.vertex_type}: {self._built_object_repr()}")
elif self._built_object is None:
message = f"{self.vertex_type} returned None."
if self.base_type == "custom_components":
message += " Make sure your build method returns a component."
raise ValueError(message)
def build(self, force: bool = False) -> Any:
if not self._built or force:

View file

@ -8,10 +8,13 @@ from langchain.text_splitter import TextSplitter
from langchain.tools import Tool
from langchain.vectorstores.base import VectorStore
from langchain.schema import BaseOutputParser
from langchain.schema.memory import BaseMemory
from langchain.memory.chat_memory import BaseChatMemory
from langchain.agents.agent import AgentExecutor
LANGCHAIN_BASE_TYPES = {
"Chain": Chain,
"AgentExecutor": AgentExecutor,
"Tool": Tool,
"BaseLLM": BaseLLM,
"PromptTemplate": PromptTemplate,
@ -22,6 +25,8 @@ LANGCHAIN_BASE_TYPES = {
"Embeddings": Embeddings,
"BaseRetriever": BaseRetriever,
"BaseOutputParser": BaseOutputParser,
"BaseMemory": BaseMemory,
"BaseChatMemory": BaseChatMemory,
}
# Langchain base types plus Python base types

View file

@ -50,8 +50,8 @@ class CustomComponent(Component, extra=Extra.allow):
for type_hint in TYPE_HINT_LIST:
if reader._is_type_hint_used_in_args(
"Optional", code
) and not reader._is_type_hint_imported("Optional", code):
type_hint, code
) and not reader._is_type_hint_imported(type_hint, code):
error_detail = {
"error": "Type hint Error",
"traceback": f"Type hint '{type_hint}' is used but not imported in the code.",

View file

@ -5,6 +5,7 @@ from langflow.api.v1.callback import (
)
from langflow.processing.process import fix_memory_inputs, format_actions
from langflow.utils.logger import logger
from langchain.agents.agent import AgentExecutor
async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwargs):
@ -20,7 +21,8 @@ async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwa
# to display intermediate steps
langchain_object.return_intermediate_steps = True
try:
fix_memory_inputs(langchain_object)
if not isinstance(langchain_object, AgentExecutor):
fix_memory_inputs(langchain_object)
except Exception as exc:
logger.error(f"Error fixing memory inputs: {exc}")

View file

@ -27,4 +27,4 @@ const PopoverContent = React.forwardRef<
));
PopoverContent.displayName = PopoverPrimitive.Content.displayName;
export { Popover, PopoverTrigger, PopoverContent };
export { Popover, PopoverContent, PopoverTrigger };