diff --git a/src/backend/base/langflow/components/models/openai.py b/src/backend/base/langflow/components/models/openai.py index 60f960a73..932bbf489 100644 --- a/src/backend/base/langflow/components/models/openai.py +++ b/src/backend/base/langflow/components/models/openai.py @@ -1,6 +1,3 @@ -import operator -from functools import reduce - from langchain_openai import ChatOpenAI from pydantic.v1 import SecretStr @@ -9,7 +6,6 @@ from langflow.base.models.openai_constants import OPENAI_MODEL_NAMES from langflow.field_typing import LanguageModel from langflow.field_typing.range_spec import RangeSpec from langflow.inputs import BoolInput, DictInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput -from langflow.inputs.inputs import HandleInput class OpenAIModelComponent(LCModelComponent): @@ -39,15 +35,6 @@ class OpenAIModelComponent(LCModelComponent): advanced=True, info="If True, it will output JSON regardless of passing a schema.", ), - DictInput( - name="output_schema", - is_list=True, - display_name="Schema", - advanced=True, - info="The schema for the Output of the model. " - "You must pass the word JSON in the prompt. " - "If left blank, JSON mode will be disabled. [DEPRECATED]", - ), DropdownInput( name="model_name", display_name="Model Name", @@ -80,26 +67,16 @@ class OpenAIModelComponent(LCModelComponent): advanced=True, value=1, ), - HandleInput( - name="output_parser", - display_name="Output Parser", - info="The parser to use to parse the output of the model", - advanced=True, - input_types=["OutputParser"], - ), ] def build_model(self) -> LanguageModel: # type: ignore[type-var] - # self.output_schema is a list of dictionaries - # let's convert it to a dictionary - output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {}) openai_api_key = self.api_key temperature = self.temperature model_name: str = self.model_name max_tokens = self.max_tokens model_kwargs = self.model_kwargs or {} openai_api_base = self.openai_api_base or "https://api.openai.com/v1" - json_mode = bool(output_schema_dict) or self.json_mode + json_mode = self.json_mode seed = self.seed api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None @@ -113,10 +90,7 @@ class OpenAIModelComponent(LCModelComponent): seed=seed, ) if json_mode: - if output_schema_dict: - output = output.with_structured_output(schema=output_schema_dict, method="json_mode") - else: - output = output.bind(response_format={"type": "json_object"}) + output = output.bind(response_format={"type": "json_object"}) return output