diff --git a/src/backend/base/langflow/base/models/model.py b/src/backend/base/langflow/base/models/model.py index ad24f7c25..1a24e7b87 100644 --- a/src/backend/base/langflow/base/models/model.py +++ b/src/backend/base/langflow/base/models/model.py @@ -175,7 +175,8 @@ class LCModelComponent(Component): messages.insert(0, SystemMessage(content=system_message)) inputs: list | dict = messages or {} try: - if self.output_parser is not None: + # TODO: Deprecated feature to be removed in upcoming release + if hasattr(self, "output_parser") and self.output_parser is not None: runnable |= self.output_parser runnable = runnable.with_config( diff --git a/src/backend/base/langflow/components/models/aiml.py b/src/backend/base/langflow/components/models/aiml.py index b91291f91..202f47fd5 100644 --- a/src/backend/base/langflow/components/models/aiml.py +++ b/src/backend/base/langflow/components/models/aiml.py @@ -7,7 +7,6 @@ from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel from langflow.field_typing.range_spec import RangeSpec from langflow.inputs import DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput -from langflow.inputs.inputs import HandleInput class AIMLModelComponent(LCModelComponent): @@ -49,13 +48,6 @@ class AIMLModelComponent(LCModelComponent): value="AIML_API_KEY", ), FloatInput(name="temperature", display_name="Temperature", value=0.1), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] @override diff --git a/src/backend/base/langflow/components/models/amazon_bedrock.py b/src/backend/base/langflow/components/models/amazon_bedrock.py index 4c2c65736..de4155899 100644 --- a/src/backend/base/langflow/components/models/amazon_bedrock.py +++ b/src/backend/base/langflow/components/models/amazon_bedrock.py @@ -2,7 +2,6 @@ from langflow.base.models.aws_constants
import AWS_REGIONS, AWS_MODEL_IDs from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel from langflow.inputs import MessageTextInput, SecretStrInput -from langflow.inputs.inputs import HandleInput from langflow.io import DictInput, DropdownInput @@ -73,13 +72,6 @@ class AmazonBedrockComponent(LCModelComponent): advanced=True, info="The URL of the Bedrock endpoint to use.", ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/anthropic.py b/src/backend/base/langflow/components/models/anthropic.py index f7aa9c6f2..66886b792 100644 --- a/src/backend/base/langflow/components/models/anthropic.py +++ b/src/backend/base/langflow/components/models/anthropic.py @@ -3,7 +3,6 @@ from pydantic.v1 import SecretStr from langflow.base.models.anthropic_constants import ANTHROPIC_MODELS from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput @@ -40,13 +39,6 @@ class AnthropicModelComponent(LCModelComponent): MessageTextInput( name="prefill", display_name="Prefill", info="Prefill text to guide the model's response.", advanced=True ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/azure_openai.py b/src/backend/base/langflow/components/models/azure_openai.py index da9d1cecc..726620ce8 100644 --- 
a/src/backend/base/langflow/components/models/azure_openai.py +++ b/src/backend/base/langflow/components/models/azure_openai.py @@ -3,7 +3,6 @@ from langchain_openai import AzureChatOpenAI from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel from langflow.inputs import MessageTextInput -from langflow.inputs.inputs import HandleInput from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput @@ -57,13 +56,6 @@ class AzureChatOpenAIComponent(LCModelComponent): advanced=True, info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/baidu_qianfan_chat.py b/src/backend/base/langflow/components/models/baidu_qianfan_chat.py index b5ae6b03a..f1742b8b4 100644 --- a/src/backend/base/langflow/components/models/baidu_qianfan_chat.py +++ b/src/backend/base/langflow/components/models/baidu_qianfan_chat.py @@ -3,7 +3,6 @@ from pydantic.v1 import SecretStr from langflow.base.models.model import LCModelComponent from langflow.field_typing.constants import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import DropdownInput, FloatInput, MessageTextInput, SecretStrInput @@ -67,13 +66,6 @@ class QianfanChatEndpointComponent(LCModelComponent): MessageTextInput( name="endpoint", display_name="Endpoint", info="Endpoint of the Qianfan LLM, required if custom model used." 
), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/cohere.py b/src/backend/base/langflow/components/models/cohere.py index 5c224744c..e1957d24d 100644 --- a/src/backend/base/langflow/components/models/cohere.py +++ b/src/backend/base/langflow/components/models/cohere.py @@ -3,7 +3,6 @@ from pydantic.v1 import SecretStr from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import FloatInput, SecretStrInput @@ -24,13 +23,6 @@ class CohereComponent(LCModelComponent): value="COHERE_API_KEY", ), FloatInput(name="temperature", display_name="Temperature", value=0.75), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/google_generative_ai.py b/src/backend/base/langflow/components/models/google_generative_ai.py index 6848c4ae3..83b7e0d20 100644 --- a/src/backend/base/langflow/components/models/google_generative_ai.py +++ b/src/backend/base/langflow/components/models/google_generative_ai.py @@ -4,7 +4,6 @@ from langflow.base.models.google_generative_ai_constants import GOOGLE_GENERATIV from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput -from langflow.inputs.inputs import HandleInput class GoogleGenerativeAIComponent(LCModelComponent): @@ -50,13 +49,6 @@ class GoogleGenerativeAIComponent(LCModelComponent): info="Decode 
using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", advanced=True, ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/groq.py b/src/backend/base/langflow/components/models/groq.py index 26d19ce8d..010221144 100644 --- a/src/backend/base/langflow/components/models/groq.py +++ b/src/backend/base/langflow/components/models/groq.py @@ -5,7 +5,6 @@ from typing_extensions import override from langflow.base.models.groq_constants import GROQ_MODELS from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput @@ -52,13 +51,6 @@ class GroqModel(LCModelComponent): value="llama-3.1-8b-instant", refresh_button=True, ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def get_models(self) -> list[str]: diff --git a/src/backend/base/langflow/components/models/huggingface.py b/src/backend/base/langflow/components/models/huggingface.py index e65c5ed45..96eaa89b1 100644 --- a/src/backend/base/langflow/components/models/huggingface.py +++ b/src/backend/base/langflow/components/models/huggingface.py @@ -7,7 +7,6 @@ from tenacity import retry, stop_after_attempt, wait_fixed # Need to update to langchain_huggingface, but have dependency with langchain_core 0.3.0 from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import DictInput, DropdownInput, FloatInput, 
IntInput, SecretStrInput, StrInput @@ -75,13 +74,6 @@ class HuggingFaceEndpointsComponent(LCModelComponent): SecretStrInput(name="huggingfacehub_api_token", display_name="API Token", password=True), DictInput(name="model_kwargs", display_name="Model Keyword Arguments", advanced=True), IntInput(name="retry_attempts", display_name="Retry Attempts", value=1, advanced=True), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def get_api_url(self) -> str: diff --git a/src/backend/base/langflow/components/models/lmstudiomodel.py b/src/backend/base/langflow/components/models/lmstudiomodel.py index 49ee8439f..6a503f33f 100644 --- a/src/backend/base/langflow/components/models/lmstudiomodel.py +++ b/src/backend/base/langflow/components/models/lmstudiomodel.py @@ -9,7 +9,6 @@ from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel from langflow.field_typing.range_spec import RangeSpec from langflow.inputs import DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput -from langflow.inputs.inputs import HandleInput class LMStudioModelComponent(LCModelComponent): @@ -84,13 +83,6 @@ class LMStudioModelComponent(LCModelComponent): advanced=True, value=1, ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/maritalk.py b/src/backend/base/langflow/components/models/maritalk.py index 80b37d2de..b2f386e10 100644 --- a/src/backend/base/langflow/components/models/maritalk.py +++ b/src/backend/base/langflow/components/models/maritalk.py @@ -4,7 +4,6 @@ from langflow.base.models.model import LCModelComponent from langflow.field_typing 
import LanguageModel from langflow.field_typing.range_spec import RangeSpec from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput -from langflow.inputs.inputs import HandleInput class MaritalkModelComponent(LCModelComponent): @@ -35,13 +34,6 @@ class MaritalkModelComponent(LCModelComponent): advanced=False, ), FloatInput(name="temperature", display_name="Temperature", value=0.1, range_spec=RangeSpec(min=0, max=1)), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/mistral.py b/src/backend/base/langflow/components/models/mistral.py index 26f7226ac..d5fb967f0 100644 --- a/src/backend/base/langflow/components/models/mistral.py +++ b/src/backend/base/langflow/components/models/mistral.py @@ -3,7 +3,6 @@ from pydantic.v1 import SecretStr from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import BoolInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput @@ -55,13 +54,6 @@ class MistralAIModelComponent(LCModelComponent): FloatInput(name="top_p", display_name="Top P", advanced=True, value=1), IntInput(name="random_seed", display_name="Random Seed", value=1, advanced=True), BoolInput(name="safe_mode", display_name="Safe Mode", advanced=True), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/nvidia.py b/src/backend/base/langflow/components/models/nvidia.py index 998a59dd6..58bd6da8c 100644 --- 
a/src/backend/base/langflow/components/models/nvidia.py +++ b/src/backend/base/langflow/components/models/nvidia.py @@ -3,7 +3,6 @@ from typing import Any from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput -from langflow.inputs.inputs import HandleInput from langflow.schema.dotdict import dotdict @@ -49,13 +48,6 @@ class NVIDIAModelComponent(LCModelComponent): advanced=True, value=1, ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): diff --git a/src/backend/base/langflow/components/models/ollama.py b/src/backend/base/langflow/components/models/ollama.py index 2feab380e..11c79cb02 100644 --- a/src/backend/base/langflow/components/models/ollama.py +++ b/src/backend/base/langflow/components/models/ollama.py @@ -6,7 +6,6 @@ from langchain_ollama import ChatOllama from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, StrInput @@ -166,13 +165,6 @@ class ChatOllamaComponent(LCModelComponent): ), StrInput(name="system", display_name="System", info="System to use for generating text.", advanced=True), StrInput(name="template", display_name="Template", info="Template to use for generating text.", advanced=True), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), *LCModelComponent._base_inputs, ] diff --git a/src/backend/base/langflow/components/models/perplexity.py 
b/src/backend/base/langflow/components/models/perplexity.py index a1adc50e2..8208cee28 100644 --- a/src/backend/base/langflow/components/models/perplexity.py +++ b/src/backend/base/langflow/components/models/perplexity.py @@ -3,7 +3,6 @@ from pydantic.v1 import SecretStr from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput @@ -60,13 +59,6 @@ class PerplexityComponent(LCModelComponent): info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", advanced=True, ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/sambanova.py b/src/backend/base/langflow/components/models/sambanova.py index 33a662804..cb2f44d8e 100644 --- a/src/backend/base/langflow/components/models/sambanova.py +++ b/src/backend/base/langflow/components/models/sambanova.py @@ -4,7 +4,6 @@ from pydantic.v1 import SecretStr from langflow.base.models.model import LCModelComponent from langflow.base.models.sambanova_constants import SAMBANOVA_MODEL_NAMES from langflow.field_typing import LanguageModel -from langflow.inputs.inputs import HandleInput from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput @@ -47,13 +46,6 @@ class SambaNovaComponent(LCModelComponent): info="The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", ), FloatInput(name="temperature", display_name="Temperature", value=0.07), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] diff --git a/src/backend/base/langflow/components/models/vertexai.py b/src/backend/base/langflow/components/models/vertexai.py index e7c5990db..20e616177 100644 --- a/src/backend/base/langflow/components/models/vertexai.py +++ b/src/backend/base/langflow/components/models/vertexai.py @@ -3,7 +3,6 @@ from typing import cast from langflow.base.models.model import LCModelComponent from langflow.field_typing import LanguageModel from langflow.inputs import MessageTextInput -from langflow.inputs.inputs import HandleInput from langflow.io import BoolInput, FileInput, FloatInput, IntInput, StrInput @@ -30,13 +29,6 @@ class ChatVertexAIComponent(LCModelComponent): IntInput(name="top_k", display_name="Top K", advanced=True), FloatInput(name="top_p", display_name="Top P", value=0.95, advanced=True), BoolInput(name="verbose", display_name="Verbose", value=False, advanced=True), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json index 5dc7f9a0b..d900fcd5d 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json @@ -1433,7 +1433,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from pydantic.v1 import SecretStr\n\nfrom 
langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs.inputs import HandleInput\nfrom langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n info=\"https://python.langchain.com/docs/integrations/chat/anthropic\",\n value=\"claude-3-5-sonnet-latest\",\n ),\n SecretStrInput(name=\"anthropic_api_key\", display_name=\"Anthropic API Key\", info=\"Your Anthropic API key.\"),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n MessageTextInput(\n name=\"anthropic_api_url\",\n display_name=\"Anthropic API URL\",\n advanced=True,\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n model = self.model\n anthropic_api_key = self.anthropic_api_key\n max_tokens = self.max_tokens\n temperature = self.temperature\n anthropic_api_url = self.anthropic_api_url or \"https://api.anthropic.com\"\n\n try:\n output = ChatAnthropic(\n model=model,\n anthropic_api_key=(SecretStr(anthropic_api_key).get_secret_value() if anthropic_api_key else None),\n max_tokens_to_sample=max_tokens,\n temperature=temperature,\n anthropic_api_url=anthropic_api_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n" + "value": "from pydantic.v1 import SecretStr\n\nfrom langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n info=\"https://python.langchain.com/docs/integrations/chat/anthropic\",\n value=\"claude-3-5-sonnet-latest\",\n ),\n SecretStrInput(name=\"anthropic_api_key\", display_name=\"Anthropic API Key\", info=\"Your Anthropic API key.\"),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n MessageTextInput(\n name=\"anthropic_api_url\",\n display_name=\"Anthropic API URL\",\n advanced=True,\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n model = self.model\n anthropic_api_key = self.anthropic_api_key\n max_tokens = self.max_tokens\n temperature = self.temperature\n anthropic_api_url = self.anthropic_api_url or \"https://api.anthropic.com\"\n\n try:\n output = ChatAnthropic(\n model=model,\n anthropic_api_key=(SecretStr(anthropic_api_key).get_secret_value() if anthropic_api_key else None),\n max_tokens_to_sample=max_tokens,\n temperature=temperature,\n anthropic_api_url=anthropic_api_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n" }, "input_value": { "_input_type": "MessageInput", @@ -1498,25 +1498,6 @@ "type": "str", "value": "claude-3-5-sonnet-20240620" }, - "output_parser": { - "_input_type": "HandleInput", - "advanced": true, - "display_name": "Output Parser", - "dynamic": false, - "info": "The parser to use to parse the output of the model", - "input_types": [ - "OutputParser" - ], - "list": false, - "name": "output_parser", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, "prefill": { "_input_type": "MessageTextInput", "advanced": true, diff --git a/src/backend/tests/unit/components/models/test_huggingface.py b/src/backend/tests/unit/components/models/test_huggingface.py index b813c5b47..3b1ceb659 100644 --- 
a/src/backend/tests/unit/components/models/test_huggingface.py +++ b/src/backend/tests/unit/components/models/test_huggingface.py @@ -1,5 +1,5 @@ from langflow.components.models.huggingface import HuggingFaceEndpointsComponent -from langflow.inputs.inputs import DictInput, DropdownInput, FloatInput, HandleInput, IntInput, SecretStrInput, StrInput +from langflow.inputs.inputs import DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput def test_huggingface_inputs(): @@ -20,7 +20,6 @@ def test_huggingface_inputs(): "huggingfacehub_api_token": SecretStrInput, "model_kwargs": DictInput, "retry_attempts": IntInput, - "output_parser": HandleInput, } # Check if all expected inputs are present