refactor(openai): Remove deprecated parameters from OpenAI component (#5171)

* refactor(openai): remove deprecated output_schema and redundant output_parser parameters

* refactor(openai): remove output_schema and output_parser parameters

Following @ogabrielluiz's review feedback to properly handle removed inputs:

1. Remove output_schema parameter:
   - Remove from inputs list
   - Remove output_schema_dict conversion in build_model
   - Remove output_schema_dict usage in json_mode logic
   - Remove output_schema_dict usage in structured_output

2. Remove output_parser parameter:
   - Remove from inputs list (no code references to clean up as it wasn't being used)

This ensures we don't have any lingering references to removed parameters.

* [autofix.ci] apply automated fixes

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
Raphael Valdetaro 2024-12-10 11:45:32 -03:00 committed by GitHub
commit 152ed8bb54
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -1,6 +1,3 @@
import operator
from functools import reduce
from langchain_openai import ChatOpenAI
from pydantic.v1 import SecretStr
@@ -9,7 +6,6 @@ from langflow.base.models.openai_constants import OPENAI_MODEL_NAMES
from langflow.field_typing import LanguageModel
from langflow.field_typing.range_spec import RangeSpec
from langflow.inputs import BoolInput, DictInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput
from langflow.inputs.inputs import HandleInput
class OpenAIModelComponent(LCModelComponent):
@@ -39,15 +35,6 @@ class OpenAIModelComponent(LCModelComponent):
advanced=True,
info="If True, it will output JSON regardless of passing a schema.",
),
DictInput(
name="output_schema",
is_list=True,
display_name="Schema",
advanced=True,
info="The schema for the Output of the model. "
"You must pass the word JSON in the prompt. "
"If left blank, JSON mode will be disabled. [DEPRECATED]",
),
DropdownInput(
name="model_name",
display_name="Model Name",
@@ -80,26 +67,16 @@
advanced=True,
value=1,
),
HandleInput(
name="output_parser",
display_name="Output Parser",
info="The parser to use to parse the output of the model",
advanced=True,
input_types=["OutputParser"],
),
]
def build_model(self) -> LanguageModel: # type: ignore[type-var]
# self.output_schema is a list of dictionaries
# let's convert it to a dictionary
output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})
openai_api_key = self.api_key
temperature = self.temperature
model_name: str = self.model_name
max_tokens = self.max_tokens
model_kwargs = self.model_kwargs or {}
openai_api_base = self.openai_api_base or "https://api.openai.com/v1"
json_mode = bool(output_schema_dict) or self.json_mode
json_mode = self.json_mode
seed = self.seed
api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None
@@ -113,10 +90,7 @@ class OpenAIModelComponent(LCModelComponent):
seed=seed,
)
if json_mode:
if output_schema_dict:
output = output.with_structured_output(schema=output_schema_dict, method="json_mode")
else:
output = output.bind(response_format={"type": "json_object"})
output = output.bind(response_format={"type": "json_object"})
return output