Add model names for Anthropic and add ChatAnthropic (preferred over the deprecated Anthropic LLM).

This commit is contained in:
Jim White 2023-06-02 12:08:57 -07:00
commit 07afc08129
6 changed files with 24 additions and 1 deletions

View file

@@ -56,6 +56,7 @@ llms:
- CTransformers
- Cohere
- Anthropic
- ChatAnthropic
memories:
- ConversationBufferMemory
- ConversationSummaryMemory

View file

@@ -11,12 +11,14 @@ from langchain import (
text_splitter,
)
from langchain.agents import agent_toolkits
from langchain.chat_models import ChatAnthropic
from langchain.chat_models import ChatOpenAI
from langflow.interface.importing.utils import import_class
## LLMs
llm_type_to_cls_dict = llms.type_to_cls_dict
llm_type_to_cls_dict["anthropic-chat"] = ChatAnthropic # type: ignore
llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore
## Chains

View file

@@ -125,6 +125,9 @@ class FrontendNode(BaseModel):
elif name == "ChatOpenAI" and key == "model_name":
field.options = constants.CHAT_OPENAI_MODELS
field.is_list = True
elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name":
field.options = constants.ANTHROPIC_MODELS
field.is_list = True
if "api_key" in key and "OpenAI" in str(name):
field.display_name = "OpenAI API Key"
field.required = False

View file

@@ -7,6 +7,20 @@ OPENAI_MODELS = [
]
CHAT_OPENAI_MODELS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"]
# Anthropic Claude model identifiers offered in the "model_name" dropdown for
# the Anthropic and ChatAnthropic nodes. Trailing comments summarize each
# model per Anthropic's published descriptions at the time of this commit.
ANTHROPIC_MODELS = [
"claude-v1", # largest model, ideal for a wide range of more complex tasks.
"claude-v1-100k", # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window.
"claude-instant-v1", # A smaller model with far lower latency, sampling at roughly 40 words/sec!
"claude-instant-v1-100k", # An enhanced version of claude-instant-v1 with a 100,000 token context window that retains its performance.
# Specific sub-versions of the above models:
"claude-v1.3", # Compared to claude-v1.2, it's more robust against red-team inputs, better at precise instruction-following, better at code, and better at non-English dialogue and writing.
"claude-v1.3-100k", # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window.
"claude-v1.2", # An improved version of claude-v1. It is slightly improved at general helpfulness, instruction following, coding, and other tasks. It is also considerably better with non-English languages. This model also has the ability to role play (in harmless ways) more consistently, and it defaults to writing somewhat longer and more thorough responses.
"claude-v1.0", # An earlier version of claude-v1.
"claude-instant-v1.1", # latest version of claude-instant-v1. It is better than claude-instant-v1.0 at a wide variety of tasks including writing, coding, and instruction following.
"claude-instant-v1.1-100k", # An enhanced version of claude-instant-v1.1 with a 100,000 token context window that retains its lightning fast 40 word/sec performance.
"claude-instant-v1.0", # An earlier version of claude-instant-v1.
]
DEFAULT_PYTHON_FUNCTION = """
def python_function(text: str) -> str:

View file

@@ -302,7 +302,9 @@ def format_dict(d, name: Optional[str] = None):
elif name == "ChatOpenAI" and key == "model_name":
value["options"] = constants.CHAT_OPENAI_MODELS
value["list"] = True
elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name":
value["options"] = constants.ANTHROPIC_MODELS
value["list"] = True
return d

View file

@@ -155,6 +155,7 @@ export const nodeIcons: {
// SerpAPIWrapper: SerperIcon,
// AZLyricsLoader: AzIcon,
Anthropic: AnthropicIcon,
ChatAnthropic: AnthropicIcon,
BingSearchAPIWrapper: BingIcon,
BingSearchRun: BingIcon,
Cohere: CohereIcon,