Merge branch 'dev' of https://github.com/logspace-ai/langflow into 45-implement-agents-as-tools

This commit is contained in:
Gabriel Almeida 2023-03-26 09:05:52 -03:00
commit 8ea37c5bc0
16 changed files with 204 additions and 53 deletions

14
poetry.lock generated
View file

@ -2212,6 +2212,18 @@ dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2
doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
[[package]]
name = "types-pyyaml"
version = "6.0.12.8"
description = "Typing stubs for PyYAML"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "types-PyYAML-6.0.12.8.tar.gz", hash = "sha256:19304869a89d49af00be681e7b267414df213f4eb89634c4495fa62e8f942b9f"},
{file = "types_PyYAML-6.0.12.8-py3-none-any.whl", hash = "sha256:5314a4b2580999b2ea06b2e5f9a7763d860d6e09cdf21c0e9561daa9cbd60178"},
]
[[package]]
name = "typing-extensions"
version = "4.5.0"
@ -2407,4 +2419,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "ebc0a8ca9ea284d8e986306a10f13ff91e7e8aae18341c83329f5b1e1bcc66bd"
content-hash = "9acd2b7396be651321ac517873a398d1631a76918fefdb003f7f587f031d9ba1"

View file

@ -31,6 +31,7 @@ typer = "^0.7.0"
gunicorn = "^20.1.0"
langchain = "^0.0.113"
openai = "^0.27.2"
types-pyyaml = "^6.0.12.8"
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"

View file

@ -1,12 +1,12 @@
import logging
import multiprocessing
import platform
from langflow.main import create_app
from pathlib import Path
import typer
from fastapi.staticfiles import StaticFiles
from pathlib import Path
import logging
from langflow.main import create_app
logger = logging.getLogger(__name__)

View file

@ -1,8 +1,9 @@
from fastapi import APIRouter, HTTPException
from langflow.interface.types import build_langchain_types_dict
from langflow.interface.run import process_data_graph
from typing import Any, Dict
from fastapi import APIRouter, HTTPException
from langflow.interface.run import process_data_graph
from langflow.interface.types import build_langchain_types_dict
# build router
router = APIRouter()

View file

@ -0,0 +1,27 @@
# Allow-list of langchain components exposed through the langflow API.
# Each top-level key names a component category; the listed class/tool names
# are the only ones served unless dev mode is enabled.
chains:
- LLMChain
- LLMMathChain
- LLMChecker
# - ConversationChain
agents:
- ZeroShotAgent
prompts:
- PromptTemplate
- FewShotPromptTemplate
llms:
- OpenAI
- OpenAIChat
tools:
- Search
- PAL-MATH
- Calculator
- Serper Search
memories:
# - ConversationBufferMemory
# NOTE(review): every entry under 'memories' is commented out, so the key
# loads as null — confirm the settings loader coerces a null list to [].
# When true, the per-category filters above are bypassed and all components
# are listed (the listing code ORs each filter with this flag).
dev: false

View file

@ -1,6 +1,43 @@
## LLM
from typing import Any
from langchain import llms
from langchain.llms.openai import OpenAIChat
llm_type_to_cls_dict = llms.type_to_cls_dict
llm_type_to_cls_dict["openai-chat"] = OpenAIChat
## Memory
# from langchain.memory.buffer_window import ConversationBufferWindowMemory
# from langchain.memory.chat_memory import ChatMessageHistory
# from langchain.memory.combined import CombinedMemory
# from langchain.memory.entity import ConversationEntityMemory
# from langchain.memory.kg import ConversationKGMemory
# from langchain.memory.readonly import ReadOnlySharedMemory
# from langchain.memory.simple import SimpleMemory
# from langchain.memory.summary import ConversationSummaryMemory
# from langchain.memory.summary_buffer import ConversationSummaryBufferMemory
memory_type_to_cls_dict: dict[str, Any] = {
# "CombinedMemory": CombinedMemory,
# "ConversationBufferWindowMemory": ConversationBufferWindowMemory,
# "ConversationBufferMemory": ConversationBufferMemory,
# "SimpleMemory": SimpleMemory,
# "ConversationSummaryBufferMemory": ConversationSummaryBufferMemory,
# "ConversationKGMemory": ConversationKGMemory,
# "ConversationEntityMemory": ConversationEntityMemory,
# "ConversationSummaryMemory": ConversationSummaryMemory,
# "ChatMessageHistory": ChatMessageHistory,
# "ConversationStringBufferMemory": ConversationStringBufferMemory,
# "ReadOnlySharedMemory": ReadOnlySharedMemory,
}
## Chain
# from langchain.chains.loading import type_to_loader_dict
# from langchain.chains.conversation.base import ConversationChain
# chain_type_to_cls_dict = type_to_loader_dict
# chain_type_to_cls_dict["conversation_chain"] = ConversationChain

View file

@ -1,9 +1,13 @@
from langchain import chains, agents, prompts
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langflow.custom import customs
from langflow.utils import util, allowed_components
from langchain import agents, chains, prompts
from langchain.agents.load_tools import get_all_tool_names
from langchain.chains.conversation import memory as memories
from langflow.custom import customs
from langflow.interface.custom_lists import (
llm_type_to_cls_dict,
memory_type_to_cls_dict,
)
from langflow.settings import settings
from langflow.utils import util
def get_type_dict():
@ -24,11 +28,10 @@ def list_type(object_type: str):
def list_agents():
"""List all agent types"""
# return list(agents.loading.AGENT_TO_CLASS.keys())
return [
agent.__name__
for agent in agents.loading.AGENT_TO_CLASS.values()
if agent.__name__ in allowed_components.AGENTS
if agent.__name__ in settings.agents or settings.dev
]
@ -38,7 +41,7 @@ def list_prompts():
library_prompts = [
prompt.__annotations__["return"].__name__
for prompt in prompts.loading.type_to_loader_dict.values()
if prompt.__annotations__["return"].__name__ in allowed_components.PROMPTS
if prompt.__annotations__["return"].__name__ in settings.prompts or settings.dev
]
return library_prompts + list(custom_prompts.keys())
@ -50,7 +53,7 @@ def list_tools():
for tool in get_all_tool_names():
tool_params = util.get_tool_params(util.get_tools_dict(tool))
if tool_params and tool_params["name"] in allowed_components.TOOLS:
if tool_params and tool_params["name"] in settings.tools or settings.dev:
tools.append(tool_params["name"])
tools.append("BaseTool")
return tools
@ -61,7 +64,7 @@ def list_llms():
return [
llm.__name__
for llm in llm_type_to_cls_dict.values()
if llm.__name__ in allowed_components.LLMS
if llm.__name__ in settings.llms or settings.dev
]
@ -70,10 +73,14 @@ def list_chain_types():
return [
chain.__annotations__["return"].__name__
for chain in chains.loading.type_to_loader_dict.values()
if chain.__annotations__["return"].__name__ in allowed_components.CHAINS
if chain.__annotations__["return"].__name__ in settings.chains or settings.dev
]
def list_memories():
"""List all memory types"""
return [memory.__name__ for memory in memories.type_to_cls_dict.values()]
return [
memory.__name__
for memory in memory_type_to_cls_dict.values()
if memory.__name__ in settings.memories or settings.dev
]

View file

@ -1,21 +1,12 @@
import json
from typing import Any, Dict, Optional
from langflow.interface.types import get_type_list
from langchain.agents.loading import load_agent_from_config
from langchain.chains.loading import load_chain_from_config
from langchain.llms.loading import load_llm_from_config
from langflow.utils import payload
from langflow.utils import util
from langchain.llms.base import BaseLLM
from langchain.agents.agent import AgentExecutor
from langchain.callbacks.base import BaseCallbackManager
from langchain.agents.tools import Tool
from langchain.agents.load_tools import (
_BASE_TOOLS,
_LLM_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from langchain.agents import agent as agent_module
from langflow.utils.graph import Graph
@ -23,6 +14,15 @@ from langflow.utils.graph import Graph
from langflow.interface.importing import import_by_type
from langchain.agents import ZeroShotAgent
from langchain.agents.loading import load_agent_from_config
from langchain.agents.tools import Tool
from langchain.callbacks.base import BaseCallbackManager
from langchain.chains.loading import load_chain_from_config
from langchain.llms.base import BaseLLM
from langchain.llms.loading import load_llm_from_config
from langflow.interface.types import get_type_list
from langflow.utils import payload, util
def instantiate_class(module_type: str, base_type: str, params: Dict) -> Any:

View file

@ -2,6 +2,7 @@ import contextlib
import io
import re
from typing import Any, Dict
from langflow.interface import loading

View file

@ -1,6 +1,6 @@
from typing import Dict, Any # noqa: F401
from typing import Any, Dict # noqa: F401
from langchain import agents, chains, prompts
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
@ -9,8 +9,12 @@ from langchain.agents.load_tools import (
get_all_tool_names,
)
from langflow.utils import util
from langflow.custom import customs
from langflow.interface.custom_lists import (
llm_type_to_cls_dict,
memory_type_to_cls_dict,
)
from langflow.utils import util
def get_signature(name: str, object_type: str):
@ -20,6 +24,7 @@ def get_signature(name: str, object_type: str):
"agents": get_agent_signature,
"prompts": get_prompt_signature,
"llms": get_llm_signature,
"memories": get_memory_signature,
"tools": get_tool_signature,
}.get(object_type, lambda name: f"Invalid type: {name}")(name)
@ -65,6 +70,14 @@ def get_llm_signature(name: str):
raise ValueError("LLM not found") from exc
def get_memory_signature(name: str):
    """Return the template signature for the memory class called *name*.

    Raises:
        ValueError: if *name* does not match any known memory type.
    """
    try:
        template = util.build_template_from_class(name, memory_type_to_cls_dict)
    except ValueError as exc:
        raise ValueError("Memory not found") from exc
    return template
def get_tool_signature(name: str):
"""Get the signature of a tool."""

View file

@ -16,6 +16,7 @@ def get_type_list():
def build_langchain_types_dict():
"""Build a dictionary of all langchain types"""
return {
"chains": {
chain: get_signature(chain, "chains") for chain in list_type("chains")
@ -27,5 +28,9 @@ def build_langchain_types_dict():
prompt: get_signature(prompt, "prompts") for prompt in list_type("prompts")
},
"llms": {llm: get_signature(llm, "llms") for llm in list_type("llms")},
"memories": {
memory: get_signature(memory, "memories")
for memory in list_type("memories")
},
"tools": {tool: get_signature(tool, "tools") for tool in list_type("tools")},
}

View file

@ -1,8 +1,9 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langflow.api.endpoints import router as endpoints_router
from langflow.api.list_endpoints import router as list_router
from langflow.api.signature import router as signatures_router
from fastapi.middleware.cors import CORSMiddleware
def create_app():

View file

@ -0,0 +1,49 @@
import os
from typing import List, Optional
import yaml
from pydantic import BaseSettings, Field, root_validator
class Settings(BaseSettings):
    """Component allow-lists loaded from the YAML config.

    Each category field may arrive as None/empty from the YAML file; the
    root validator normalizes every falsy category value to an empty list.
    The *dev* flag is left untouched by the validator.
    """

    chains: Optional[List[str]] = Field(...)
    agents: Optional[List[str]] = Field(...)
    prompts: Optional[List[str]] = Field(...)
    llms: Optional[List[str]] = Field(...)
    tools: Optional[List[str]] = Field(...)
    memories: Optional[List[str]] = Field(...)
    dev: bool = Field(...)

    class Config:
        # Re-run validation whenever a field is assigned after construction.
        validate_assignment = True

    @root_validator
    def validate_lists(cls, values):
        """Replace falsy category values (None, empty) with empty lists."""
        return {
            key: [] if key != "dev" and not value else value
            for key, value in values.items()
        }
def save_settings_to_yaml(settings: Settings, file_path: str):
    """Serialize *settings* to a YAML document written at *file_path*."""
    settings_dict = settings.dict()
    with open(file_path, "w") as f:
        yaml.dump(settings_dict, f)
def load_settings_from_yaml(file_path: str) -> Settings:
    """Load a :class:`Settings` instance from a YAML file.

    A bare file name (no directory component) is resolved relative to this
    module's directory; a path containing a directory part is used as given.

    Raises:
        FileNotFoundError: if the resolved path does not exist.
        pydantic.ValidationError: if the YAML content fails validation.
    """
    # os.path.dirname handles both '/' and the platform separator, unlike the
    # previous '"/" not in file_path' check which missed Windows paths.
    if not os.path.dirname(file_path):
        current_path = os.path.dirname(os.path.abspath(__file__))
        file_path = os.path.join(current_path, file_path)

    with open(file_path, "r") as f:
        settings_dict = yaml.safe_load(f)

    return Settings.parse_obj(settings_dict)
settings = load_settings_from_yaml("config.yaml")

View file

@ -1,9 +0,0 @@
CHAINS = ["LLMChain", "LLMMathChain", "LLMChecker"]
AGENTS = ["ZeroShotAgent"]
PROMPTS = ["PromptTemplate", "FewShotPromptTemplate"]
LLMS = ["OpenAI", "OpenAIChat"]
TOOLS = ["Search", "PAL-MATH", "Calculator", "Serper Search"]

View file

@ -1,15 +1,15 @@
import ast
import importlib
import inspect
import re
import importlib
from typing import Dict, Optional
from langchain.agents.load_tools import (
_BASE_TOOLS,
_LLM_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
from typing import Optional, Dict
from langchain.agents.tools import Tool
@ -84,6 +84,7 @@ def build_template_from_class(
if v.__name__ == name:
_class = v
# Get the docstring
docs = get_class_doc(_class)
variables = {"_type": _type}
@ -238,11 +239,7 @@ def get_class_doc(class_name):
A dictionary containing the extracted information, with keys
for 'Description', 'Parameters', 'Attributes', and 'Returns'.
"""
# Get the class docstring
docstring = class_name.__doc__
# Parse the docstring to extract information
lines = docstring.split("\n")
# Template
data = {
"Description": "",
"Parameters": {},
@ -251,6 +248,15 @@ def get_class_doc(class_name):
"Returns": {},
}
# Get the class docstring
docstring = class_name.__doc__
if not docstring:
return data
# Parse the docstring to extract information
lines = docstring.split("\n")
current_section = "Description"
for line in lines:

View file

@ -54,5 +54,5 @@
"last 1 safari version"
]
},
"proxy": "http://backend:7860"
"proxy": "http://localhost:7860"
}