feat: add chat-gpt

This commit is contained in:
Ibis Prevedello 2023-03-20 10:09:51 -03:00
commit 38141943f6
7 changed files with 21 additions and 10 deletions

9
poetry.lock generated
View file

@@ -1378,13 +1378,14 @@ files = [
[[package]]
name = "openai"
version = "0.26.5"
version = "0.27.2"
description = "Python client library for the OpenAI API"
category = "main"
optional = false
python-versions = ">=3.7.1"
files = [
{file = "openai-0.26.5.tar.gz", hash = "sha256:2882a59c67ae33c2716a04389a6e6680d061f073424953732f917fde219addfd"},
{file = "openai-0.27.2-py3-none-any.whl", hash = "sha256:6df674cf257e9e0504f1fd191c333d3f6a2442b13218d0eccf06230eb24d320e"},
{file = "openai-0.27.2.tar.gz", hash = "sha256:5869fdfa34b0ec66c39afa22f4a0fb83a135dff81f6505f52834c6ab3113f762"},
]
[package.dependencies]
@@ -1395,7 +1396,7 @@ tqdm = "*"
[package.extras]
datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"]
embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "sklearn", "tenacity (>=8.0.1)"]
embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"]
wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"]
[[package]]
@@ -2406,4 +2407,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "9287450d71ec7c3bd7b1ef2766e22b86fb92bf0e2f51260adf039b1175c74f7c"
content-hash = "ebc0a8ca9ea284d8e986306a10f13ff91e7e8aae18341c83329f5b1e1bcc66bd"

View file

@@ -15,7 +15,6 @@ langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
python = "^3.9"
openai = "^0.26.5"
fastapi = "^0.92.0"
uvicorn = "^0.20.0"
beautifulsoup4 = "^4.11.2"
@@ -24,6 +23,7 @@ google-api-python-client = "^2.79.0"
typer = "^0.7.0"
gunicorn = "^20.1.0"
langchain = "^0.0.113"
openai = "^0.27.2"
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"

View file

@@ -0,0 +1,6 @@
from langchain import llms
from langchain.llms.openai import OpenAIChat
llm_type_to_cls_dict = llms.type_to_cls_dict
llm_type_to_cls_dict["openai-chat"] = OpenAIChat

View file

@@ -1,4 +1,5 @@
from langchain import chains, agents, prompts, llms
from langchain import chains, agents, prompts
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langflow.custom import customs
from langflow.utils import util, allowed_components
from langchain.agents.load_tools import get_all_tool_names
@@ -55,7 +56,7 @@ def list_llms():
"""List all llm types"""
return [
llm.__name__
for llm in llms.type_to_cls_dict.values()
for llm in llm_type_to_cls_dict.values()
if llm.__name__ in allowed_components.LLMS
]

View file

@@ -1,5 +1,6 @@
from typing import Dict, Any # noqa: F401
from langchain import agents, chains, llms, prompts
from langchain import agents, chains, prompts
from langflow.interface.custom_lists import llm_type_to_cls_dict
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
@@ -56,7 +57,7 @@ def get_prompt_signature(name: str):
def get_llm_signature(name: str):
"""Get the signature of an llm."""
try:
return util.build_template_from_class(name, llms.type_to_cls_dict)
return util.build_template_from_class(name, llm_type_to_cls_dict)
except ValueError as exc:
raise ValueError("LLM not found") from exc

View file

@@ -4,6 +4,6 @@ AGENTS = ["ZeroShotAgent"]
PROMPTS = ["PromptTemplate", "FewShotPromptTemplate"]
LLMS = ["OpenAI"]
LLMS = ["OpenAI", "OpenAIChat"]
TOOLS = ["Search", "PAL-MATH", "Calculator", "Serper Search"]

View file

@@ -297,6 +297,8 @@ def format_dict(d, name: Optional[str] = None):
# Add options to openai
if name == "OpenAI" and key == "model_name":
value["options"] = ["text-davinci-003", "text-davinci-002"]
elif name == "OpenAIChat" and key == "model_name":
value["options"] = ["gpt-3.5-turbo", "gpt-4"]
return d