From c5306643bb3f531330e1b59abadd0baafc9827a2 Mon Sep 17 00:00:00 2001 From: Edwin Jose Date: Mon, 5 May 2025 15:23:57 -0400 Subject: [PATCH] fix: anthropic Model component base url empty issue (#7872) * Update anthropic.py * updates to templates * [autofix.ci] apply automated fixes * update the templates * Refactor Anthropic API URL handling to use a constant. Introduced DEFAULT_ANTHROPIC_API_URL in anthropic_constants.py and updated references in anthropic.py for improved maintainability and clarity. * [autofix.ci] apply automated fixes * Refactor AnthropicModelComponent to streamline API URL handling. Removed unnecessary base_url checks and ensured consistent use of DEFAULT_ANTHROPIC_API_URL for improved clarity and maintainability. * [autofix.ci] apply automated fixes * Fix base_url check in update_build_config to ensure key existence before accessing value * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Gabriel Luiz Freitas Almeida --- .../langflow/base/models/anthropic_constants.py | 3 +++ .../base/langflow/components/models/anthropic.py | 13 +++++++++---- .../starter_projects/Custom Component Maker.json | 4 ++-- .../starter_projects/LoopTemplate.json | 4 ++-- .../Portfolio Website Code Generator.json | 8 ++++---- 5 files changed, 20 insertions(+), 12 deletions(-) diff --git a/src/backend/base/langflow/base/models/anthropic_constants.py b/src/backend/base/langflow/base/models/anthropic_constants.py index 17e1f0001..ec918065a 100644 --- a/src/backend/base/langflow/base/models/anthropic_constants.py +++ b/src/backend/base/langflow/base/models/anthropic_constants.py @@ -28,3 +28,6 @@ DEPRECATED_MODELS = [ "claude-3-5-haiku-20241022", "claude-3-haiku-20240307", ] + + +DEFAULT_ANTHROPIC_API_URL = "https://api.anthropic.com" diff --git a/src/backend/base/langflow/components/models/anthropic.py b/src/backend/base/langflow/components/models/anthropic.py index 
7601e3a2c..b0c13f5a6 100644 --- a/src/backend/base/langflow/components/models/anthropic.py +++ b/src/backend/base/langflow/components/models/anthropic.py @@ -1,10 +1,11 @@ -from typing import Any +from typing import Any, cast import requests from loguru import logger from langflow.base.models.anthropic_constants import ( ANTHROPIC_MODELS, + DEFAULT_ANTHROPIC_API_URL, TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS, TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS, ) @@ -58,8 +59,9 @@ class AnthropicModelComponent(LCModelComponent): name="base_url", display_name="Anthropic API URL", info="Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", - value="https://api.anthropic.com", + value=DEFAULT_ANTHROPIC_API_URL, real_time_refresh=True, + advanced=True, ), BoolInput( name="tool_model_enabled", @@ -88,7 +90,7 @@ class AnthropicModelComponent(LCModelComponent): anthropic_api_key=self.api_key, max_tokens_to_sample=self.max_tokens, temperature=self.temperature, - anthropic_api_url=self.base_url, + anthropic_api_url=DEFAULT_ANTHROPIC_API_URL, streaming=self.stream, ) except Exception as e: @@ -125,7 +127,7 @@ class AnthropicModelComponent(LCModelComponent): model_with_tool = ChatAnthropic( model=model, # Use the current model being checked anthropic_api_key=self.api_key, - anthropic_api_url=self.base_url, + anthropic_api_url=cast(str, self.base_url) or DEFAULT_ANTHROPIC_API_URL, ) if ( @@ -160,6 +162,9 @@ class AnthropicModelComponent(LCModelComponent): return None def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if "base_url" in build_config and build_config["base_url"]["value"] is None: + build_config["base_url"]["value"] = DEFAULT_ANTHROPIC_API_URL + self.base_url = DEFAULT_ANTHROPIC_API_URL if field_name in {"base_url", "model_name", "tool_model_enabled", "api_key"} and field_value: try: if len(self.api_key) == 0: diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Custom 
Component Maker.json b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json index c7050e01b..b51c73200 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json @@ -1458,7 +1458,7 @@ }, "base_url": { "_input_type": "MessageTextInput", - "advanced": false, + "advanced": true, "display_name": "Anthropic API URL", "dynamic": false, "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", @@ -1496,7 +1496,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" + "value": "from typing import 
Any, cast\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n DEFAULT_ANTHROPIC_API_URL,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n value=DEFAULT_ANTHROPIC_API_URL,\n real_time_refresh=True,\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=DEFAULT_ANTHROPIC_API_URL,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=cast(str, self.base_url) or DEFAULT_ANTHROPIC_API_URL,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if \"base_url\" in build_config and build_config[\"base_url\"][\"value\"] is None:\n build_config[\"base_url\"][\"value\"] = DEFAULT_ANTHROPIC_API_URL\n self.base_url = DEFAULT_ANTHROPIC_API_URL\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n 
build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" }, "input_value": { "_input_type": "MessageInput", diff --git a/src/backend/base/langflow/initial_setup/starter_projects/LoopTemplate.json b/src/backend/base/langflow/initial_setup/starter_projects/LoopTemplate.json index e59f5c0f9..63b360c35 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/LoopTemplate.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/LoopTemplate.json @@ -597,7 +597,7 @@ }, "base_url": { "_input_type": "MessageTextInput", - "advanced": false, + "advanced": true, "display_name": "Anthropic API URL", "dynamic": false, "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", @@ -635,7 +635,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" + "value": "from typing import 
Any, cast\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n DEFAULT_ANTHROPIC_API_URL,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n value=DEFAULT_ANTHROPIC_API_URL,\n real_time_refresh=True,\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=DEFAULT_ANTHROPIC_API_URL,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=cast(str, self.base_url) or DEFAULT_ANTHROPIC_API_URL,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if \"base_url\" in build_config and build_config[\"base_url\"][\"value\"] is None:\n build_config[\"base_url\"][\"value\"] = DEFAULT_ANTHROPIC_API_URL\n self.base_url = DEFAULT_ANTHROPIC_API_URL\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n 
build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" }, "input_value": { "_input_type": "MessageInput", diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json b/src/backend/base/langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json index 49e9f4fb0..e6ded5eb2 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json @@ -401,7 +401,7 @@ }, "base_url": { "_input_type": "MessageTextInput", - "advanced": false, + "advanced": true, "display_name": "Anthropic API URL", "dynamic": false, "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", @@ -439,7 +439,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n 
info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n value=\"https://api.anthropic.com\",\n real_time_refresh=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" + "value": "from typing import 
Any, cast\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n    ANTHROPIC_MODELS,\n    DEFAULT_ANTHROPIC_API_URL,\n    TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n    TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n    display_name = \"Anthropic\"\n    description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n    icon = \"Anthropic\"\n    name = \"AnthropicModel\"\n\n    inputs = [\n        *LCModelComponent._base_inputs,\n        IntInput(\n            name=\"max_tokens\",\n            display_name=\"Max Tokens\",\n            advanced=True,\n            value=4096,\n            info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n        ),\n        DropdownInput(\n            name=\"model_name\",\n            display_name=\"Model Name\",\n            options=ANTHROPIC_MODELS,\n            refresh_button=True,\n            value=ANTHROPIC_MODELS[0],\n            combobox=True,\n        ),\n        SecretStrInput(\n            name=\"api_key\",\n            display_name=\"Anthropic API Key\",\n            info=\"Your Anthropic API key.\",\n            value=None,\n            required=True,\n            real_time_refresh=True,\n        ),\n        SliderInput(\n            name=\"temperature\",\n            display_name=\"Temperature\",\n            value=0.1,\n            info=\"Run inference with this temperature. Must be in the closed interval [0.0, 1.0].\",\n            range_spec=RangeSpec(min=0, max=1, step=0.01),\n            advanced=True,\n        ),\n        MessageTextInput(\n            name=\"base_url\",\n            display_name=\"Anthropic API URL\",\n            info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n value=DEFAULT_ANTHROPIC_API_URL,\n real_time_refresh=True,\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=DEFAULT_ANTHROPIC_API_URL,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=cast(str, self.base_url) or DEFAULT_ANTHROPIC_API_URL,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if \"base_url\" in build_config and build_config[\"base_url\"][\"value\"] is None:\n build_config[\"base_url\"][\"value\"] = DEFAULT_ANTHROPIC_API_URL\n self.base_url = DEFAULT_ANTHROPIC_API_URL\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n 
build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" }, "input_value": { "_input_type": "MessageInput", @@ -733,7 +733,7 @@ }, "base_url": { "_input_type": "MessageTextInput", - "advanced": false, + "advanced": true, "display_name": "Anthropic API URL", "dynamic": false, "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", @@ -771,7 +771,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n        ),\n        DropdownInput(\n            name=\"model_name\",\n            display_name=\"Model Name\",\n            options=ANTHROPIC_MODELS,\n            refresh_button=True,\n            value=ANTHROPIC_MODELS[0],\n            combobox=True,\n        ),\n        SecretStrInput(\n            name=\"api_key\",\n            display_name=\"Anthropic API Key\",\n            info=\"Your Anthropic API key.\",\n            value=None,\n            required=True,\n            real_time_refresh=True,\n        ),\n        SliderInput(\n            name=\"temperature\",\n            display_name=\"Temperature\",\n            value=0.1,\n            info=\"Run inference with this temperature. Must be in the closed interval [0.0, 1.0].\",\n            range_spec=RangeSpec(min=0, max=1, step=0.01),\n            advanced=True,\n        ),\n        MessageTextInput(\n            name=\"base_url\",\n            display_name=\"Anthropic API URL\",\n            info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n            value=\"https://api.anthropic.com\",\n            real_time_refresh=True,\n        ),\n        BoolInput(\n            name=\"tool_model_enabled\",\n            display_name=\"Enable Tool Models\",\n            info=(\n                \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n            ),\n            advanced=False,\n            value=False,\n            real_time_refresh=True,\n        ),\n        MessageTextInput(\n            name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n        ),\n    ]\n\n    def build_model(self) -> LanguageModel:  # type: ignore[type-var]\n        try:\n            from langchain_anthropic.chat_models import ChatAnthropic\n        except ImportError as e:\n            msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=self.base_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=self.base_url,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" + "value": "from typing import 
Any, cast\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n    ANTHROPIC_MODELS,\n    DEFAULT_ANTHROPIC_API_URL,\n    TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n    TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n    display_name = \"Anthropic\"\n    description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n    icon = \"Anthropic\"\n    name = \"AnthropicModel\"\n\n    inputs = [\n        *LCModelComponent._base_inputs,\n        IntInput(\n            name=\"max_tokens\",\n            display_name=\"Max Tokens\",\n            advanced=True,\n            value=4096,\n            info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n        ),\n        DropdownInput(\n            name=\"model_name\",\n            display_name=\"Model Name\",\n            options=ANTHROPIC_MODELS,\n            refresh_button=True,\n            value=ANTHROPIC_MODELS[0],\n            combobox=True,\n        ),\n        SecretStrInput(\n            name=\"api_key\",\n            display_name=\"Anthropic API Key\",\n            info=\"Your Anthropic API key.\",\n            value=None,\n            required=True,\n            real_time_refresh=True,\n        ),\n        SliderInput(\n            name=\"temperature\",\n            display_name=\"Temperature\",\n            value=0.1,\n            info=\"Run inference with this temperature. Must be in the closed interval [0.0, 1.0].\",\n            range_spec=RangeSpec(min=0, max=1, step=0.01),\n            advanced=True,\n        ),\n        MessageTextInput(\n            name=\"base_url\",\n            display_name=\"Anthropic API URL\",\n            info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n value=DEFAULT_ANTHROPIC_API_URL,\n real_time_refresh=True,\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=DEFAULT_ANTHROPIC_API_URL,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=cast(str, self.base_url) or DEFAULT_ANTHROPIC_API_URL,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if \"base_url\" in build_config and build_config[\"base_url\"][\"value\"] is None:\n build_config[\"base_url\"][\"value\"] = DEFAULT_ANTHROPIC_API_URL\n self.base_url = DEFAULT_ANTHROPIC_API_URL\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n 
build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" }, "input_value": { "_input_type": "MessageInput",