"""Model-name constants for the Anthropic model component."""

# Models currently offered in the component dropdown. Every model in this
# list supports tool calling.
ANTHROPIC_MODELS = [
    "claude-3-7-sonnet-latest",
    "claude-3-5-sonnet-latest",
    "claude-3-5-haiku-latest",
    "claude-3-opus-latest",
    "claude-3-sonnet-20240229",
    "claude-3-haiku-20240307",
]

# All currently offered models support tool calling, so this mirrors
# ANTHROPIC_MODELS. Derived (rather than re-listed) so the two can never
# drift apart; kept as a separate name for callers that import it.
TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS = list(ANTHROPIC_MODELS)

# Legacy models known NOT to support tool calling.
TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS = [
    "claude-2.1",
    "claude-2.0",
]

# Dated snapshots superseded by the "-latest" aliases above.
# NOTE(review): "claude-3-haiku-20240307" is listed here AND remains in
# ANTHROPIC_MODELS — confirm which list it should be dropped from.
DEPRECATED_MODELS = [
    "claude-3-5-sonnet-20240620",
    "claude-3-5-sonnet-20241022",
    "claude-3-5-haiku-20241022",
    "claude-3-haiku-20240307",
]
AnthropicModelComponent(LCModelComponent): client = anthropic.Anthropic(api_key=self.api_key) models = client.models.list(limit=20).data - model_ids = [model.id for model in models] + model_ids = ANTHROPIC_MODELS + [model.id for model in models] except (ImportError, ValueError, requests.exceptions.RequestException) as e: logger.exception(f"Error getting model names: {e}") model_ids = ANTHROPIC_MODELS + if tool_model_enabled: try: from langchain_anthropic.chat_models import ChatAnthropic except ImportError as e: msg = "langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`." raise ImportError(msg) from e + + # Create a new list instead of modifying while iterating + filtered_models = [] for model in model_ids: + if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS: + filtered_models.append(model) + continue + model_with_tool = ChatAnthropic( - model=self.model_name, + model=model, # Use the current model being checked anthropic_api_key=self.api_key, anthropic_api_url=self.base_url, ) - if not self.supports_tool_calling(model_with_tool): - model_ids.remove(model) + + if ( + not self.supports_tool_calling(model_with_tool) + or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS + ): + continue + + filtered_models.append(model) + + return filtered_models + return model_ids def _get_exception_message(self, exception: Exception) -> str | None: @@ -151,6 +172,7 @@ class AnthropicModelComponent(LCModelComponent): ids = ANTHROPIC_MODELS build_config["model_name"]["options"] = ids build_config["model_name"]["value"] = ids[0] + build_config["model_name"]["combobox"] = True except Exception as e: msg = f"Error getting model names: {e}" raise ValueError(msg) from e diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json index 99be8e5ef..c7050e01b 100644 --- 
a/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json @@ -1496,7 +1496,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n for model in model_ids:\n model_with_tool = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n if not self.supports_tool_calling(model_with_tool):\n model_ids.remove(model)\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" + "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import 
RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. 
If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" }, "input_value": { "_input_type": 
"MessageInput", @@ -1549,12 +1549,10 @@ "info": "", "name": "model_name", "options": [ + "claude-3-7-sonnet-latest", "claude-3-5-sonnet-latest", "claude-3-5-haiku-latest", "claude-3-opus-latest", - "claude-3-5-sonnet-20240620", - "claude-3-5-sonnet-20241022", - "claude-3-5-haiku-20241022", "claude-3-sonnet-20240229", "claude-3-haiku-20240307" ], diff --git a/src/backend/base/langflow/initial_setup/starter_projects/LoopTemplate.json b/src/backend/base/langflow/initial_setup/starter_projects/LoopTemplate.json index 41c4464c8..e59f5c0f9 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/LoopTemplate.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/LoopTemplate.json @@ -635,7 +635,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n for model in model_ids:\n model_with_tool = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n if not self.supports_tool_calling(model_with_tool):\n model_ids.remove(model)\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" + "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import 
RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. 
If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" }, "input_value": { "_input_type": 
"MessageInput", @@ -688,12 +688,10 @@ "info": "", "name": "model_name", "options": [ + "claude-3-7-sonnet-latest", "claude-3-5-sonnet-latest", "claude-3-5-haiku-latest", "claude-3-opus-latest", - "claude-3-5-sonnet-20240620", - "claude-3-5-sonnet-20241022", - "claude-3-5-haiku-20241022", "claude-3-sonnet-20240229", "claude-3-haiku-20240307" ], diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json b/src/backend/base/langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json index a59830714..49e9f4fb0 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json @@ -439,7 +439,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n for model in model_ids:\n model_with_tool = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n if not self.supports_tool_calling(model_with_tool):\n model_ids.remove(model)\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" + "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import 
RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. 
If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" }, "input_value": { "_input_type": 
"MessageInput", @@ -492,12 +492,10 @@ "info": "", "name": "model_name", "options": [ + "claude-3-7-sonnet-latest", "claude-3-5-sonnet-latest", "claude-3-5-haiku-latest", "claude-3-opus-latest", - "claude-3-5-sonnet-20240620", - "claude-3-5-sonnet-20241022", - "claude-3-5-haiku-20241022", "claude-3-sonnet-20240229", "claude-3-haiku-20240307" ], @@ -773,7 +771,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. 
Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is 
not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n for model in model_ids:\n model_with_tool = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n if not self.supports_tool_calling(model_with_tool):\n model_ids.remove(model)\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" + "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec 
import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. 
If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" }, "input_value": { "_input_type": 
"MessageInput", @@ -826,12 +824,10 @@ "info": "", "name": "model_name", "options": [ + "claude-3-7-sonnet-latest", "claude-3-5-sonnet-latest", "claude-3-5-haiku-latest", "claude-3-opus-latest", - "claude-3-5-sonnet-20240620", - "claude-3-5-sonnet-20241022", - "claude-3-5-haiku-20241022", "claude-3-sonnet-20240229", "claude-3-haiku-20240307" ],