The Anthropic component did not work properly, so it was developed further.

This commit is contained in:
Johngdae 2023-11-23 04:48:39 +00:00
commit 43fa592bdb

View file

@ -0,0 +1,71 @@
from typing import Optional
from langflow import CustomComponent
from langchain.chat_models.anthropic import ChatAnthropic
from langchain.llms.base import BaseLLM
class AnthropicLLM(CustomComponent):
    """Langflow custom component that builds a LangChain ``ChatAnthropic`` model.

    Exposes model selection, API key, token limit, temperature, and an
    optional API endpoint as configurable fields in the langflow UI.
    """

    # Shown in the langflow UI as the component's title / tooltip.
    display_name: str = "AnthropicLLM"
    description: str = "Anthropic Chat&Completion large language models."

    def build_config(self):
        """Return the langflow field configuration for this component."""
        return {
            "model": {
                "display_name": "Model Name",
                "options": [
                    "claude-2.1",
                    "claude-2.0",
                    "claude-instant-1.2",
                    "claude-instant-1"
                    # Add more models as needed
                ],
                "info": "https://python.langchain.com/docs/integrations/chat/anthropic",
                "required": True,
                "value": "claude-2.1",
            },
            "anthropic_api_key": {
                "display_name": "Anthropic API Key",
                "required": True,
                "password": True,
                "info": "Your Anthropic API key.",
            },
            "max_tokens": {
                "display_name": "Max Tokens",
                "field_type": "int",
                "value": 256,
            },
            "temperature": {
                "display_name": "Temperature",
                "field_type": "float",
                "value": 0.7,
            },
            "api_endpoint": {
                "display_name": "API Endpoint",
                "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
            },
            "code": {"show": False},
        }

    def build(
        self,
        model: str,
        anthropic_api_key: Optional[str] = None,
        max_tokens: Optional[int] = None,
        temperature: Optional[float] = None,
        api_endpoint: Optional[str] = None,
    ) -> BaseLLM:
        """Instantiate and return a ``ChatAnthropic`` model.

        Parameters
        ----------
        model: Anthropic model name (e.g. ``"claude-2.1"``).
        anthropic_api_key: API key; if omitted, langchain falls back to the
            ``ANTHROPIC_API_KEY`` environment variable.
        max_tokens: Maximum tokens to sample; model default is used when None.
        temperature: Sampling temperature; model default is used when None.
        api_endpoint: Anthropic API base URL; defaults to the public endpoint.

        Raises
        ------
        ValueError: if the model could not be constructed / validated.
        """
        # Set default API endpoint if not provided
        if not api_endpoint:
            api_endpoint = "https://api.anthropic.com"

        # Only forward optional values that were actually provided: passing an
        # explicit None would override (or fail validation for) ChatAnthropic's
        # non-Optional pydantic fields such as max_tokens_to_sample.
        kwargs = {
            "model": model,
            "anthropic_api_url": api_endpoint,
        }
        if anthropic_api_key is not None:
            kwargs["anthropic_api_key"] = anthropic_api_key
        if max_tokens is not None:
            kwargs["max_tokens_to_sample"] = max_tokens
        if temperature is not None:
            kwargs["temperature"] = temperature

        try:
            output = ChatAnthropic(**kwargs)
        except Exception as e:
            raise ValueError("Could not connect to Anthropic API.") from e
        return output