feat(model): replace MessageTextInput with MultilineInput for system_message (#5532)

 (model.py): introduce MultilineInput class to handle system messages in LCModelComponent for better organization and readability

📝 (Basic Prompting.json): Update node IDs and edge IDs for better readability and consistency in the JSON file.

📝 (Basic Prompting.json): Update node properties to display a chat message in the Playground instead of generating text using OpenAI LLMs
♻️ (Basic Prompting.json): Refactor field_order and outputs properties for the Chat Output node to include new fields and remove unnecessary ones

 (Basic Prompting.json): Update ChatOutput component to include new inputs and outputs for better customization and functionality. Add support for storing messages, setting sender type, and customizing message appearance.

 (Basic Prompting.json): Update field names and values for better clarity and consistency
♻️ (Basic Prompting.json): Refactor field names and values to improve readability and maintainability

 (Basic Prompting.json): Update the content of the file to include a new class 'OpenAIModelComponent' with inputs for configuring OpenAI model settings and methods for building the model and handling exceptions. Remove the 'data_template' field and update 'input_value' field to 'MessageInput'. Add new fields 'json_mode', 'max_tokens', 'model_kwargs', and 'model_name' with their respective configurations.

📝 (Basic Prompting.json): Update values and descriptions for fields in the Basic Prompting starter project to improve clarity and usability. Add a new field for temperature control with a default value of 0.1.
This commit is contained in:
Cristhian Zanforlin Lousa 2025-01-03 11:23:52 -03:00 committed by GitHub
commit 16ff8eb200
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 389 additions and 355 deletions

View file

@@ -10,8 +10,8 @@ from langchain_core.output_parsers import BaseOutputParser
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.custom import Component
from langflow.field_typing import LanguageModel
from langflow.inputs import MessageInput, MessageTextInput
from langflow.inputs.inputs import BoolInput, InputTypes
from langflow.inputs import MessageInput
from langflow.inputs.inputs import BoolInput, InputTypes, MultilineInput
from langflow.schema.message import Message
from langflow.template.field.base import Output
@@ -26,7 +26,7 @@ class LCModelComponent(Component):
_base_inputs: list[InputTypes] = [
MessageInput(name="input_value", display_name="Input"),
MessageTextInput(
MultilineInput(
name="system_message",
display_name="System Message",
info="System message to pass to the model.",

View file

@@ -2,12 +2,10 @@
"data": {
"edges": [
{
"animated": false,
"className": "",
"data": {
"sourceHandle": {
"dataType": "Prompt",
"id": "Prompt-euzzD",
"id": "Prompt-51zV0",
"name": "prompt",
"output_types": [
"Message"
@@ -15,26 +13,24 @@
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-mMCO4",
"id": "OpenAIModel-GtPty",
"inputTypes": [
"Message"
],
"type": "str"
}
},
"id": "reactflow__edge-Prompt-euzzD{œdataTypeœ:œPromptœ,œidœ:œPrompt-euzzDœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-mMCO4{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-mMCO4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"source": "Prompt-euzzD",
"sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-euzzDœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}",
"target": "OpenAIModel-mMCO4",
"targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-mMCO4œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
"id": "xy-edge__Prompt-51zV0{œdataTypeœ:œPromptœ,œidœ:œPrompt-51zV0œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-GtPty{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-GtPtyœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"source": "Prompt-51zV0",
"sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-51zV0œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}",
"target": "OpenAIModel-GtPty",
"targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-GtPtyœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
},
{
"animated": false,
"className": "",
"data": {
"sourceHandle": {
"dataType": "ChatInput",
"id": "ChatInput-M2ktx",
"id": "ChatInput-5i1v3",
"name": "message",
"output_types": [
"Message"
@@ -42,25 +38,24 @@
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-mMCO4",
"id": "OpenAIModel-GtPty",
"inputTypes": [
"Message"
],
"type": "str"
}
},
"id": "reactflow__edge-ChatInput-M2ktx{œdataTypeœ:œChatInputœ,œidœ:œChatInput-M2ktxœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-mMCO4{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-mMCO4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"source": "ChatInput-M2ktx",
"sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-M2ktxœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
"target": "OpenAIModel-mMCO4",
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-mMCO4œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
"id": "xy-edge__ChatInput-5i1v3{œdataTypeœ:œChatInputœ,œidœ:œChatInput-5i1v3œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-GtPty{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-GtPtyœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"source": "ChatInput-5i1v3",
"sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-5i1v3œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
"target": "OpenAIModel-GtPty",
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-GtPtyœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
},
{
"className": "",
"data": {
"sourceHandle": {
"dataType": "OpenAIModel",
"id": "OpenAIModel-mMCO4",
"id": "OpenAIModel-GtPty",
"name": "text_output",
"output_types": [
"Message"
@@ -68,18 +63,18 @@
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-1s4P8",
"id": "ChatOutput-0kSEM",
"inputTypes": [
"Message"
],
"type": "str"
}
},
"id": "reactflow__edge-OpenAIModel-mMCO4{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-mMCO4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-1s4P8{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-1s4P8œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"source": "OpenAIModel-mMCO4",
"sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-mMCO4œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}",
"target": "ChatOutput-1s4P8",
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-1s4P8œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
"id": "xy-edge__OpenAIModel-GtPty{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-GtPtyœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-0kSEM{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-0kSEMœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"source": "OpenAIModel-GtPty",
"sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-GtPtyœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}",
"target": "ChatOutput-0kSEM",
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-0kSEMœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
}
],
"nodes": [
@@ -87,7 +82,7 @@
"data": {
"description": "Get chat inputs from the Playground.",
"display_name": "Chat Input",
"id": "ChatInput-M2ktx",
"id": "ChatInput-5i1v3",
"node": {
"base_classes": [
"Message"
@@ -352,7 +347,11 @@
},
"dragging": false,
"height": 234,
"id": "ChatInput-M2ktx",
"id": "ChatInput-5i1v3",
"measured": {
"height": 234,
"width": 320
},
"position": {
"x": 689.5720422421635,
"y": 765.155834131403
@@ -369,7 +368,7 @@
"data": {
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
"id": "Prompt-euzzD",
"id": "Prompt-51zV0",
"node": {
"base_classes": [
"Message"
@@ -472,10 +471,14 @@
},
"dragging": false,
"height": 260,
"id": "Prompt-euzzD",
"id": "Prompt-51zV0",
"measured": {
"height": 260,
"width": 320
},
"position": {
"x": 690.2015147036818,
"y": 1018.5443911764344
"y": 1040.6625705470924
},
"positionAbsolute": {
"x": 690.2015147036818,
@@ -487,7 +490,7 @@
},
{
"data": {
"id": "undefined-emdQy",
"id": "undefined-uksjN",
"node": {
"description": "## 📖 README\n\nPerform basic prompting with an OpenAI model.\n\n#### Quick Start\n- Add your **OpenAI API key** to the **OpenAI Model**\n- Open the **Playground** to chat with your bot.\n\n#### Next steps:\n Experiment by changing the prompt and the OpenAI model temperature to see how the bot's responses change.",
"display_name": "Read Me",
@@ -499,7 +502,11 @@
},
"dragging": false,
"height": 250,
"id": "undefined-emdQy",
"id": "undefined-uksjN",
"measured": {
"height": 250,
"width": 600
},
"position": {
"x": 66.38770028934243,
"y": 749.744424427066
@@ -519,7 +526,7 @@
},
{
"data": {
"id": "note-e6K1n",
"id": "note-krY7d",
"node": {
"description": "### 💡 Add your OpenAI API key here 👇",
"display_name": "",
@@ -532,7 +539,11 @@
},
"dragging": false,
"height": 324,
"id": "note-e6K1n",
"id": "note-krY7d",
"measured": {
"height": 324,
"width": 324
},
"position": {
"x": 1075.829573520873,
"y": 657.2057655038416
@@ -552,318 +563,7 @@
},
{
"data": {
"description": "Generates text using OpenAI LLMs.",
"display_name": "OpenAI",
"id": "OpenAIModel-mMCO4",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
"description": "Generates text using OpenAI LLMs.",
"display_name": "OpenAI",
"documentation": "",
"edited": false,
"field_order": [
"input_value",
"system_message",
"stream",
"max_tokens",
"model_kwargs",
"json_mode",
"output_schema",
"model_name",
"openai_api_base",
"api_key",
"temperature",
"seed",
"output_parser"
],
"frozen": false,
"icon": "OpenAI",
"legacy": false,
"metadata": {},
"output_types": [],
"outputs": [
{
"cache": true,
"display_name": "Text",
"method": "text_response",
"name": "text_output",
"required_inputs": [],
"selected": "Message",
"types": [
"Message"
],
"value": "__UNDEFINED__"
},
{
"cache": true,
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [],
"selected": "LanguageModel",
"types": [
"LanguageModel"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"api_key": {
"_input_type": "SecretStrInput",
"advanced": false,
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"load_from_db": true,
"name": "api_key",
"password": true,
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"type": "str",
"value": "OPENAI_API_KEY"
},
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "from langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n SliderInput(\n name=\"temperature\", display_name=\"Temperature\", value=0.1, range_spec=RangeSpec(min=0, max=2, step=0.01)\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n"
},
"input_value": {
"_input_type": "MessageInput",
"advanced": false,
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "input_value",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"json_mode": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "JSON Mode",
"dynamic": false,
"info": "If True, it will output JSON regardless of passing a schema.",
"list": false,
"name": "json_mode",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": false
},
"max_tokens": {
"_input_type": "IntInput",
"advanced": true,
"display_name": "Max Tokens",
"dynamic": false,
"info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
"list": false,
"name": "max_tokens",
"placeholder": "",
"range_spec": {
"max": 128000,
"min": 0,
"step": 0.1,
"step_type": "float"
},
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "int",
"value": ""
},
"model_kwargs": {
"_input_type": "DictInput",
"advanced": true,
"display_name": "Model Kwargs",
"dynamic": false,
"info": "Additional keyword arguments to pass to the model.",
"list": false,
"name": "model_kwargs",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"type": "dict",
"value": {}
},
"model_name": {
"_input_type": "DropdownInput",
"advanced": false,
"combobox": false,
"display_name": "Model Name",
"dynamic": false,
"info": "",
"name": "model_name",
"options": [
"gpt-4o-mini",
"gpt-4o",
"gpt-4-turbo",
"gpt-4-turbo-preview",
"gpt-4",
"gpt-3.5-turbo",
"gpt-3.5-turbo-0125"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "gpt-4o-mini"
},
"openai_api_base": {
"_input_type": "StrInput",
"advanced": true,
"display_name": "OpenAI API Base",
"dynamic": false,
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.",
"list": false,
"load_from_db": false,
"name": "openai_api_base",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"seed": {
"_input_type": "IntInput",
"advanced": true,
"display_name": "Seed",
"dynamic": false,
"info": "The seed controls the reproducibility of the job.",
"list": false,
"name": "seed",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "int",
"value": 1
},
"stream": {
"_input_type": "BoolInput",
"advanced": false,
"display_name": "Stream",
"dynamic": false,
"info": "Stream the response from the model. Streaming works only in Chat.",
"list": false,
"name": "stream",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": false
},
"system_message": {
"_input_type": "MessageTextInput",
"advanced": false,
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "system_message",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"temperature": {
"_input_type": "FloatInput",
"advanced": false,
"display_name": "Temperature",
"dynamic": false,
"info": "",
"list": false,
"name": "temperature",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "float",
"value": 0.1
}
},
"tool_mode": false
},
"type": "OpenAIModel"
},
"dragging": false,
"height": 630,
"id": "OpenAIModel-mMCO4",
"position": {
"x": 1081.0157946607428,
"y": 707.3740542546418
},
"positionAbsolute": {
"x": 1081.0157946607428,
"y": 707.3740542546418
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"id": "ChatOutput-1s4P8",
"id": "ChatOutput-0kSEM",
"node": {
"base_classes": [
"Message"
@@ -1122,9 +822,13 @@
},
"dragging": false,
"height": 234,
"id": "ChatOutput-1s4P8",
"id": "ChatOutput-0kSEM",
"measured": {
"height": 234,
"width": 320
},
"position": {
"x": 1444.936881624563,
"x": 1460.070372772908,
"y": 872.7273956769025
},
"positionAbsolute": {
@@ -1134,12 +838,342 @@
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"id": "OpenAIModel-GtPty",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"beta": false,
"category": "models",
"conditional_paths": [],
"custom_fields": {},
"description": "Generates text using OpenAI LLMs.",
"display_name": "OpenAI",
"documentation": "",
"edited": false,
"field_order": [
"input_value",
"system_message",
"stream",
"max_tokens",
"model_kwargs",
"json_mode",
"model_name",
"openai_api_base",
"api_key",
"temperature",
"seed"
],
"frozen": false,
"icon": "OpenAI",
"key": "OpenAIModel",
"legacy": false,
"metadata": {},
"minimized": false,
"output_types": [],
"outputs": [
{
"cache": true,
"display_name": "Text",
"method": "text_response",
"name": "text_output",
"required_inputs": [],
"selected": "Message",
"types": [
"Message"
],
"value": "__UNDEFINED__"
},
{
"cache": true,
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [],
"selected": "LanguageModel",
"types": [
"LanguageModel"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"score": 2.220446049250313e-16,
"template": {
"_type": "Component",
"api_key": {
"_input_type": "SecretStrInput",
"advanced": false,
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"load_from_db": true,
"name": "api_key",
"password": true,
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"type": "str",
"value": ""
},
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "from langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n SliderInput(\n name=\"temperature\", display_name=\"Temperature\", value=0.1, range_spec=RangeSpec(min=0, max=2, step=0.01)\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n"
},
"input_value": {
"_input_type": "MessageInput",
"advanced": false,
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "input_value",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"json_mode": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "JSON Mode",
"dynamic": false,
"info": "If True, it will output JSON regardless of passing a schema.",
"list": false,
"name": "json_mode",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "bool",
"value": false
},
"max_tokens": {
"_input_type": "IntInput",
"advanced": true,
"display_name": "Max Tokens",
"dynamic": false,
"info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
"list": false,
"name": "max_tokens",
"placeholder": "",
"range_spec": {
"max": 128000,
"min": 0,
"step": 0.1,
"step_type": "float"
},
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "int",
"value": ""
},
"model_kwargs": {
"_input_type": "DictInput",
"advanced": true,
"display_name": "Model Kwargs",
"dynamic": false,
"info": "Additional keyword arguments to pass to the model.",
"list": false,
"name": "model_kwargs",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"type": "dict",
"value": {}
},
"model_name": {
"_input_type": "DropdownInput",
"advanced": false,
"combobox": false,
"display_name": "Model Name",
"dynamic": false,
"info": "",
"name": "model_name",
"options": [
"gpt-4o-mini",
"gpt-4o",
"gpt-4-turbo",
"gpt-4-turbo-preview",
"gpt-4",
"gpt-3.5-turbo",
"gpt-3.5-turbo-0125"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "gpt-4o-mini"
},
"openai_api_base": {
"_input_type": "StrInput",
"advanced": true,
"display_name": "OpenAI API Base",
"dynamic": false,
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.",
"list": false,
"load_from_db": false,
"name": "openai_api_base",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"seed": {
"_input_type": "IntInput",
"advanced": true,
"display_name": "Seed",
"dynamic": false,
"info": "The seed controls the reproducibility of the job.",
"list": false,
"name": "seed",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "int",
"value": 1
},
"stream": {
"_input_type": "BoolInput",
"advanced": false,
"display_name": "Stream",
"dynamic": false,
"info": "Stream the response from the model. Streaming works only in Chat.",
"list": false,
"name": "stream",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "bool",
"value": false
},
"system_message": {
"_input_type": "MultilineInput",
"advanced": false,
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"multiline": true,
"name": "system_message",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"temperature": {
"_input_type": "SliderInput",
"advanced": false,
"display_name": "Temperature",
"dynamic": false,
"info": "",
"max_label": "",
"max_label_icon": "",
"min_label": "",
"min_label_icon": "",
"name": "temperature",
"placeholder": "",
"range_spec": {
"max": 2,
"min": 0,
"step": 0.01,
"step_type": "float"
},
"required": false,
"show": true,
"slider_buttons": false,
"slider_buttons_options": [],
"slider_input": false,
"title_case": false,
"tool_mode": false,
"type": "slider",
"value": 0.1
}
},
"tool_mode": false
},
"showNode": true,
"type": "OpenAIModel"
},
"dragging": false,
"id": "OpenAIModel-GtPty",
"measured": {
"height": 770,
"width": 360
},
"position": {
"x": 1075.2249314800433,
"y": 729.622454137517
},
"selected": false,
"type": "genericNode"
}
],
"viewport": {
"x": -21.631817700819965,
"y": -334.5576887147924,
"zoom": 0.7749929474098888
"x": -352.0739415813646,
"y": -439.19385215506,
"zoom": 0.8590218788624804
}
},
"description": "Perform basic prompting with an OpenAI model.",