From 43fa592bdba13e4750167da1c45bf361a707de42 Mon Sep 17 00:00:00 2001
From: Johngdae
Date: Thu, 23 Nov 2023 04:48:39 +0000
Subject: [PATCH] The Anthropic component, did not work properly, so it was
 developed additionally.

---
 .../langflow/components/llms/AnthropicLLM.py | 76 ++++++++++++++++++++
 1 file changed, 76 insertions(+)
 create mode 100644 src/backend/langflow/components/llms/AnthropicLLM.py

diff --git a/src/backend/langflow/components/llms/AnthropicLLM.py b/src/backend/langflow/components/llms/AnthropicLLM.py
new file mode 100644
index 000000000..1f75ba39f
--- /dev/null
+++ b/src/backend/langflow/components/llms/AnthropicLLM.py
@@ -0,0 +1,76 @@
+from typing import Optional
+from langflow import CustomComponent
+from langchain.chat_models.anthropic import ChatAnthropic
+from langchain.llms.base import BaseLLM
+
+
+class AnthropicLLM(CustomComponent):
+    """Langflow component wrapping LangChain's ChatAnthropic model."""
+
+    display_name: str = "AnthropicLLM"
+    description: str = "Anthropic Chat&Completion large language models."
+
+    def build_config(self):
+        """Describe the input fields rendered in the Langflow node editor."""
+        return {
+            "model": {
+                "display_name": "Model Name",
+                "options": [
+                    "claude-2.1",
+                    "claude-2.0",
+                    "claude-instant-1.2",
+                    "claude-instant-1",
+                    # Add more models as needed
+                ],
+                "info": "https://python.langchain.com/docs/integrations/chat/anthropic",
+                "required": True,
+                "value": "claude-2.1",
+            },
+            "anthropic_api_key": {
+                "display_name": "Anthropic API Key",
+                "required": True,
+                "password": True,
+                "info": "Your Anthropic API key.",
+            },
+            "max_tokens": {
+                "display_name": "Max Tokens",
+                "field_type": "int",
+                "value": 256,
+            },
+            "temperature": {
+                "display_name": "Temperature",
+                "field_type": "float",
+                "value": 0.7,
+            },
+            "api_endpoint": {
+                "display_name": "API Endpoint",
+                "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
+            },
+            "code": {"show": False},
+        }
+
+    def build(
+        self,
+        model: str,
+        anthropic_api_key: Optional[str] = None,
+        max_tokens: Optional[int] = None,
+        temperature: Optional[float] = None,
+        api_endpoint: Optional[str] = None,
+    ) -> BaseLLM:
+        """Instantiate a ChatAnthropic model from the configured field values.
+
+        Raises ValueError (chained from the underlying error) if the client
+        cannot be constructed.
+        """
+        # NOTE(review): ChatAnthropic is a chat model (BaseChatModel), not a
+        # BaseLLM subclass; annotation kept unchanged for interface
+        # compatibility -- confirm against langflow's type wiring.
+        # Fall back to the public Anthropic endpoint when none is configured.
+        if not api_endpoint:
+            api_endpoint = "https://api.anthropic.com"
+
+        try:
+            output = ChatAnthropic(
+                model=model,
+                anthropic_api_key=anthropic_api_key,
+                max_tokens_to_sample=max_tokens,
+                temperature=temperature,
+                anthropic_api_url=api_endpoint,
+            )
+        except Exception as e:
+            raise ValueError("Could not connect to Anthropic API.") from e
+        return output