diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json
index ed2419a68..3d928b649 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json
@@ -6,7 +6,7 @@
         "data": {
           "sourceHandle": {
             "dataType": "Memory",
-            "id": "Memory-H6QVu",
+            "id": "Memory-rvcL5",
             "name": "messages_text",
             "output_types": [
               "Message"
@@ -14,7 +14,7 @@
           },
           "targetHandle": {
             "fieldName": "context",
-            "id": "Prompt-Q6gRn",
+            "id": "Prompt-VuDd0",
             "inputTypes": [
               "Document",
               "Message",
@@ -24,18 +24,18 @@
             "type": "str"
           }
         },
-        "id": "reactflow__edge-Memory-H6QVu{œdataTypeœ:œMemoryœ,œidœ:œMemory-H6QVuœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-Q6gRn{œfieldNameœ:œcontextœ,œidœ:œPrompt-Q6gRnœ,œinputTypesœ:[œDocumentœ,œMessageœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
-        "source": "Memory-H6QVu",
-        "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-H6QVuœ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}",
-        "target": "Prompt-Q6gRn",
-        "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-Q6gRnœ, œinputTypesœ: [œDocumentœ, œMessageœ, œRecordœ, œTextœ], œtypeœ: œstrœ}"
+        "id": "reactflow__edge-Memory-rvcL5{œdataTypeœ:œMemoryœ,œidœ:œMemory-rvcL5œ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-VuDd0{œfieldNameœ:œcontextœ,œidœ:œPrompt-VuDd0œ,œinputTypesœ:[œDocumentœ,œMessageœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
+        "source": "Memory-rvcL5",
+        "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-rvcL5œ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}",
+        "target": "Prompt-VuDd0",
+        "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-VuDd0œ, œinputTypesœ: [œDocumentœ, œMessageœ, œRecordœ, œTextœ], œtypeœ: œstrœ}"
       },
       {
         "className": "",
         "data": {
           "sourceHandle": {
             "dataType": "ChatInput",
-            "id": "ChatInput-HDCD9",
+            "id": "ChatInput-9iFsd",
             "name": "message",
             "output_types": [
               "Message"
@@ -43,7 +43,7 @@
           },
           "targetHandle": {
             "fieldName": "user_message",
-            "id": "Prompt-Q6gRn",
+            "id": "Prompt-VuDd0",
             "inputTypes": [
               "Document",
               "Message",
@@ -53,18 +53,18 @@
             "type": "str"
           }
         },
-        "id": "reactflow__edge-ChatInput-HDCD9{œdataTypeœ:œChatInputœ,œidœ:œChatInput-HDCD9œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-Q6gRn{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-Q6gRnœ,œinputTypesœ:[œDocumentœ,œMessageœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
-        "source": "ChatInput-HDCD9",
-        "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-HDCD9œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
-        "target": "Prompt-Q6gRn",
-        "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-Q6gRnœ, œinputTypesœ: [œDocumentœ, œMessageœ, œRecordœ, œTextœ], œtypeœ: œstrœ}"
+        "id": "reactflow__edge-ChatInput-9iFsd{œdataTypeœ:œChatInputœ,œidœ:œChatInput-9iFsdœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-VuDd0{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-VuDd0œ,œinputTypesœ:[œDocumentœ,œMessageœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
+        "source": "ChatInput-9iFsd",
+        "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-9iFsdœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
+        "target": "Prompt-VuDd0",
+        "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-VuDd0œ, œinputTypesœ: [œDocumentœ, œMessageœ, œRecordœ, œTextœ], œtypeœ: œstrœ}"
       },
       {
         "className": "",
         "data": {
           "sourceHandle": {
             "dataType": "Prompt",
-            "id": "Prompt-Q6gRn",
+            "id": "Prompt-VuDd0",
             "name": "prompt",
             "output_types": [
               "Message"
@@ -72,25 +72,25 @@
           },
           "targetHandle": {
             "fieldName": "input_value",
-            "id": "OpenAIModel-nNBA0",
+            "id": "OpenAIModel-uVOc5",
             "inputTypes": [
               "Message"
             ],
             "type": "str"
           }
         },
-        "id": "reactflow__edge-Prompt-Q6gRn{œdataTypeœ:œPromptœ,œidœ:œPrompt-Q6gRnœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-nNBA0{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-nNBA0œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
-        "source": "Prompt-Q6gRn",
-        "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-Q6gRnœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}",
-        "target": "OpenAIModel-nNBA0",
-        "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-nNBA0œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
+        "id": "reactflow__edge-Prompt-VuDd0{œdataTypeœ:œPromptœ,œidœ:œPrompt-VuDd0œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-uVOc5{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-uVOc5œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
+        "source": "Prompt-VuDd0",
+        "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-VuDd0œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}",
+        "target": "OpenAIModel-uVOc5",
+        "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-uVOc5œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
       },
       {
         "className": "",
         "data": {
           "sourceHandle": {
             "dataType": "OpenAIModel",
-            "id": "OpenAIModel-nNBA0",
+            "id": "OpenAIModel-uVOc5",
             "name": "text_output",
             "output_types": [
               "Message"
@@ -98,7 +98,7 @@
           },
           "targetHandle": {
             "fieldName": "input_value",
-            "id": "ChatOutput-0wtla",
+            "id": "ChatOutput-R7jsA",
             "inputTypes": [
               "Message",
               "str"
@@ -106,11 +106,11 @@
             "type": "str"
           }
         },
-        "id": "reactflow__edge-OpenAIModel-nNBA0{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-nNBA0œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-0wtla{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-0wtlaœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
-        "source": "OpenAIModel-nNBA0",
-        "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-nNBA0œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}",
-        "target": "ChatOutput-0wtla",
-        "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-0wtlaœ, œinputTypesœ: [œMessageœ, œstrœ], œtypeœ: œstrœ}"
+        "id": "reactflow__edge-OpenAIModel-uVOc5{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-uVOc5œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-R7jsA{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-R7jsAœ,œinputTypesœ:[œMessageœ,œstrœ],œtypeœ:œstrœ}",
+        "source": "OpenAIModel-uVOc5",
+        "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-uVOc5œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}",
+        "target": "ChatOutput-R7jsA",
+        "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-R7jsAœ, œinputTypesœ: [œMessageœ, œstrœ], œtypeœ: œstrœ}"
       }
     ],
     "nodes": [
@@ -118,7 +118,7 @@
         "data": {
           "description": "A component for creating prompt templates using dynamic variables.",
           "display_name": "Prompt",
-          "id": "Prompt-Q6gRn",
+          "id": "Prompt-VuDd0",
           "node": {
             "base_classes": [
               "Text",
@@ -259,7 +259,7 @@
         },
         "dragging": false,
         "height": 525,
-        "id": "Prompt-Q6gRn",
+        "id": "Prompt-VuDd0",
        "position": {
           "x": 1900.7563740044732,
           "y": 755.4337191022057
@@ -274,7 +274,10 @@
       },
       {
         "data": {
-          "id": "Memory-H6QVu",
+          "description": "Retrieves stored chat messages.",
+          "display_name": "Memory",
+          "edited": false,
+          "id": "Memory-rvcL5",
           "node": {
             "base_classes": [
               "Data",
@@ -286,20 +289,22 @@
             "description": "Retrieves stored chat messages.",
             "display_name": "Memory",
             "documentation": "",
+            "edited": true,
             "field_order": [
               "sender",
               "sender_name",
               "n_messages",
               "session_id",
-              "order"
+              "order",
+              "template"
             ],
             "frozen": false,
-            "icon": "history",
+            "icon": "message-square-more",
             "output_types": [],
             "outputs": [
               {
                 "cache": true,
-                "display_name": "Message Data",
+                "display_name": "Chat History",
                 "method": "retrieve_messages",
                 "name": "messages",
                 "selected": "Data",
@@ -310,7 +315,7 @@
               },
               {
                 "cache": true,
-                "display_name": "Parsed",
+                "display_name": "Messages (Text)",
                 "method": "retrieve_messages_as_text",
                 "name": "messages_text",
                 "selected": "Message",
@@ -339,7 +344,7 @@
                 "show": true,
                 "title_case": false,
                 "type": "code",
-                "value": "from langflow.custom import Component\nfrom langflow.io import DropdownInput, IntInput, Output, TextInput\nfrom langflow.memory import get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\n\n\nclass MemoryComponent(Component):\n    display_name = \"Memory\"\n    description = \"Retrieves stored chat messages.\"\n    icon = \"history\"\n\n    inputs = [\n        DropdownInput(\n            name=\"sender\",\n            display_name=\"Sender Type\",\n            options=[\"Machine\", \"User\", \"Machine and User\"],\n            value=\"Machine and User\",\n            info=\"Type of sender.\",\n            advanced=True,\n        ),\n        TextInput(\n            name=\"sender_name\",\n            display_name=\"Sender Name\",\n            info=\"Name of the sender.\",\n            advanced=True,\n        ),\n        IntInput(\n            name=\"n_messages\",\n            display_name=\"Number of Messages\",\n            value=100,\n            info=\"Number of messages to retrieve.\",\n            advanced=True,\n        ),\n        TextInput(\n            name=\"session_id\",\n            display_name=\"Session ID\",\n            info=\"Session ID of the chat history.\",\n            advanced=True,\n        ),\n        DropdownInput(\n            name=\"order\",\n            display_name=\"Order\",\n            options=[\"Ascending\", \"Descending\"],\n            value=\"Ascending\",\n            info=\"Order of the messages.\",\n            advanced=True,\n        ),\n    ]\n\n    outputs = [\n        Output(display_name=\"Message Data\", name=\"messages\", method=\"retrieve_messages\"),\n        Output(display_name=\"Parsed\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n    ]\n\n    def retrieve_messages(self) -> Data:\n        sender = self.sender\n        sender_name = self.sender_name\n        session_id = self.session_id\n        n_messages = self.n_messages\n        order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n        if sender == \"Machine and User\":\n            sender = None\n\n        messages = get_messages(\n            sender=sender,\n            sender_name=sender_name,\n            session_id=session_id,\n            limit=n_messages,\n            order=order,\n        )\n        self.status = messages\n        return messages\n\n    def retrieve_messages_as_text(self) -> Message:\n        messages = self.retrieve_messages()\n        messages_text = \"\\n\".join([\"{sender_name}: {text}\".format(**message.data) for message in messages])\n        self.status = messages_text\n        return Message(text=messages_text)\n"
+                "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DropdownInput, IntInput, MultilineInput, Output, TextInput\nfrom langflow.memory import get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\n\n\nclass MemoryComponent(Component):\n    display_name = \"Chat Memory\"\n    description = \"Retrieves stored chat messages.\"\n    icon = \"message-square-more\"\n\n    inputs = [\n        DropdownInput(\n            name=\"sender\",\n            display_name=\"Sender Type\",\n            options=[\"Machine\", \"User\", \"Machine and User\"],\n            value=\"Machine and User\",\n            info=\"Type of sender.\",\n            advanced=True,\n        ),\n        TextInput(\n            name=\"sender_name\",\n            display_name=\"Sender Name\",\n            info=\"Name of the sender.\",\n            advanced=True,\n        ),\n        IntInput(\n            name=\"n_messages\",\n            display_name=\"Number of Messages\",\n            value=100,\n            info=\"Number of messages to retrieve.\",\n            advanced=True,\n        ),\n        TextInput(\n            name=\"session_id\",\n            display_name=\"Session ID\",\n            info=\"Session ID of the chat history.\",\n            advanced=True,\n        ),\n        DropdownInput(\n            name=\"order\",\n            display_name=\"Order\",\n            options=[\"Ascending\", \"Descending\"],\n            value=\"Ascending\",\n            info=\"Order of the messages.\",\n            advanced=True,\n        ),\n        MultilineInput(\n            name=\"template\",\n            display_name=\"Template\",\n            info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n            value=\"{sender_name}: {text}\",\n            advanced=True,\n        ),\n    ]\n\n    outputs = [\n        Output(display_name=\"Chat History\", name=\"messages\", method=\"retrieve_messages\"),\n        Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n    ]\n\n    def retrieve_messages(self) -> Data:\n        sender = self.sender\n        sender_name = self.sender_name\n        session_id = self.session_id\n        n_messages = self.n_messages\n        order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n        if sender == \"Machine and User\":\n            sender = None\n\n        messages = get_messages(\n            sender=sender,\n            sender_name=sender_name,\n            session_id=session_id,\n            limit=n_messages,\n            order=order,\n        )\n        self.status = messages\n        return messages\n\n    def retrieve_messages_as_text(self) -> Message:\n        messages_text = data_to_text(self.template, self.retrieve_messages())\n        self.status = messages_text\n        return Message(text=messages_text)\n"
               },
               "n_messages": {
                 "advanced": true,
@@ -400,7 +405,6 @@
                 ],
                 "list": false,
                 "load_from_db": false,
-                "multiline": true,
                 "name": "sender_name",
                 "placeholder": "",
                 "required": false,
@@ -419,7 +423,6 @@
                 ],
                 "list": false,
                 "load_from_db": false,
-                "multiline": true,
                 "name": "session_id",
                 "placeholder": "",
                 "required": false,
@@ -427,6 +430,25 @@
                 "title_case": false,
                 "type": "str",
                 "value": ""
+              },
+              "template": {
+                "advanced": true,
+                "display_name": "Template",
+                "dynamic": false,
+                "info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
+                "input_types": [
+                  "Message"
+                ],
+                "list": false,
+                "load_from_db": false,
+                "multiline": true,
+                "name": "template",
+                "placeholder": "",
+                "required": false,
+                "show": true,
+                "title_case": false,
+                "type": "str",
+                "value": "{sender_name}: {text}"
               }
             }
           },
@@ -434,7 +456,7 @@
         },
         "dragging": false,
         "height": 267,
-        "id": "Memory-H6QVu",
+        "id": "Memory-rvcL5",
         "position": {
           "x": 1258.8089948698466,
           "y": 547.1243849102437
@@ -449,7 +471,7 @@
       },
       {
         "data": {
-          "id": "ChatInput-HDCD9",
+          "id": "ChatInput-9iFsd",
          "node": {
             "base_classes": [
               "Message"
@@ -627,7 +649,7 @@
         },
         "dragging": false,
         "height": 309,
-        "id": "ChatInput-HDCD9",
+        "id": "ChatInput-9iFsd",
         "position": {
           "x": 1246.4850995457527,
           "y": 912.733279525042
@@ -642,10 +664,13 @@
       },
       {
         "data": {
-          "id": "OpenAIModel-nNBA0",
+          "description": "Generates text using OpenAI LLMs.",
+          "display_name": "OpenAI",
+          "edited": false,
+          "id": "OpenAIModel-uVOc5",
           "node": {
             "base_classes": [
-              "BaseLanguageModel",
+              "LanguageModel",
               "Message"
             ],
             "beta": false,
@@ -654,6 +679,7 @@
             "description": "Generates text using OpenAI LLMs.",
             "display_name": "OpenAI",
             "documentation": "",
+            "edited": true,
             "field_order": [
               "input_value",
               "max_tokens",
@@ -713,7 +739,7 @@
                 "show": true,
                 "title_case": false,
                 "type": "code",
-                "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n    BoolInput,\n    DictInput,\n    DropdownInput,\n    FloatInput,\n    IntInput,\n    MessageInput,\n    SecretStrInput,\n    StrInput,\n)\nfrom langflow.schema.message import Message\nfrom langflow.template import Output\n\n\nclass OpenAIModelComponent(LCModelComponent):\n    display_name = \"OpenAI\"\n    description = \"Generates text using OpenAI LLMs.\"\n    icon = \"OpenAI\"\n\n    inputs = [\n        MessageInput(name=\"input_value\", display_name=\"Input\"),\n        IntInput(\n            name=\"max_tokens\",\n            display_name=\"Max Tokens\",\n            advanced=True,\n            info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n        ),\n        DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n        DictInput(\n            name=\"output_schema\",\n            is_list=True,\n            display_name=\"Schema\",\n            advanced=True,\n            info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n        ),\n        DropdownInput(\n            name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n        ),\n        StrInput(\n            name=\"openai_api_base\",\n            display_name=\"OpenAI API Base\",\n            advanced=True,\n            info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n        ),\n        SecretStrInput(\n            name=\"openai_api_key\",\n            display_name=\"OpenAI API Key\",\n            info=\"The OpenAI API Key to use for the OpenAI model.\",\n            advanced=False,\n            value=\"OPENAI_API_KEY\",\n        ),\n        FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n        BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n        StrInput(\n            name=\"system_message\",\n            display_name=\"System Message\",\n            info=\"System message to pass to the model.\",\n            advanced=True,\n        ),\n        IntInput(\n            name=\"seed\",\n            display_name=\"Seed\",\n            info=\"The seed controls the reproducibility of the job.\",\n            advanced=True,\n            value=1,\n        ),\n    ]\n    outputs = [\n        Output(display_name=\"Text\", name=\"text_output\", method=\"text_response\"),\n        Output(display_name=\"Language Model\", name=\"model_output\", method=\"build_model\"),\n    ]\n\n    def text_response(self) -> Message:\n        input_value = self.input_value\n        stream = self.stream\n        system_message = self.system_message\n        output = self.build_model()\n        result = self.get_chat_result(output, stream, input_value, system_message)\n        self.status = result\n        return result\n\n    def build_model(self) -> LanguageModel:\n        # self.output_schea is a list of dictionaries\n        # let's convert it to a dictionary\n        output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n        openai_api_key = self.openai_api_key\n        temperature = self.temperature\n        model_name: str = self.model_name\n        max_tokens = self.max_tokens\n        model_kwargs = self.model_kwargs\n        openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n        json_mode = bool(output_schema_dict)\n        seed = self.seed\n        if openai_api_key:\n            api_key = SecretStr(openai_api_key)\n        else:\n            api_key = None\n        output = ChatOpenAI(\n            max_tokens=max_tokens or None,\n            model_kwargs=model_kwargs or {},\n            model=model_name,\n            base_url=openai_api_base,\n            api_key=api_key,\n            temperature=temperature or 0.1,\n            seed=seed,\n        )\n        if json_mode:\n            output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n\n        return output\n"
+                "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n    BoolInput,\n    DictInput,\n    DropdownInput,\n    FloatInput,\n    IntInput,\n    MessageInput,\n    SecretStrInput,\n    StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n    display_name = \"OpenAI\"\n    description = \"Generates text using OpenAI LLMs.\"\n    icon = \"OpenAI\"\n\n    inputs = [\n        MessageInput(name=\"input_value\", display_name=\"Input\"),\n        IntInput(\n            name=\"max_tokens\",\n            display_name=\"Max Tokens\",\n            advanced=True,\n            info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n        ),\n        DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n        DictInput(\n            name=\"output_schema\",\n            is_list=True,\n            display_name=\"Schema\",\n            advanced=True,\n            info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n        ),\n        DropdownInput(\n            name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n        ),\n        StrInput(\n            name=\"openai_api_base\",\n            display_name=\"OpenAI API Base\",\n            advanced=True,\n            info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n        ),\n        SecretStrInput(\n            name=\"openai_api_key\",\n            display_name=\"OpenAI API Key\",\n            info=\"The OpenAI API Key to use for the OpenAI model.\",\n            advanced=False,\n            value=\"OPENAI_API_KEY\",\n        ),\n        FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n        BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n        StrInput(\n            name=\"system_message\",\n            display_name=\"System Message\",\n            info=\"System message to pass to the model.\",\n            advanced=True,\n        ),\n        IntInput(\n            name=\"seed\",\n            display_name=\"Seed\",\n            info=\"The seed controls the reproducibility of the job.\",\n            advanced=True,\n            value=1,\n        ),\n    ]\n\n    def build_model(self) -> LanguageModel:\n        # self.output_schea is a list of dictionaries\n        # let's convert it to a dictionary\n        output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n        openai_api_key = self.openai_api_key\n        temperature = self.temperature\n        model_name: str = self.model_name\n        max_tokens = self.max_tokens\n        model_kwargs = self.model_kwargs or {}\n        openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n        json_mode = bool(output_schema_dict)\n        seed = self.seed\n        model_kwargs[\"seed\"] = seed\n\n        if openai_api_key:\n            api_key = SecretStr(openai_api_key)\n        else:\n            api_key = None\n        output = ChatOpenAI(\n            max_tokens=max_tokens or None,\n            model_kwargs=model_kwargs,\n            model=model_name,\n            base_url=openai_api_base,\n            api_key=api_key,\n            temperature=temperature or 0.1,\n        )\n        if json_mode:\n            output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")  # type: ignore\n\n        return output\n\n    def _get_exception_message(self, e: Exception):\n        \"\"\"\n        Get a message from an OpenAI exception.\n\n        Args:\n            exception (Exception): The exception to get the message from.\n\n        Returns:\n            str: The message from the exception.\n        \"\"\"\n\n        try:\n            from openai import BadRequestError\n        except ImportError:\n            return\n        if isinstance(e, BadRequestError):\n            message = e.body.get(\"message\")  # type: ignore\n            if message:\n                return message\n        return\n"
               },
               "input_value": {
                 "advanced": false,
@@ -725,7 +751,6 @@
                 ],
                 "list": false,
                 "load_from_db": false,
-                "multiline": true,
                 "name": "input_value",
                 "placeholder": "",
                 "required": false,
@@ -789,7 +814,6 @@
                 "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.",
                 "list": false,
                 "load_from_db": false,
-                "multiline": true,
                 "name": "openai_api_base",
                 "placeholder": "",
                 "required": false,
@@ -863,7 +887,6 @@
                 "info": "System message to pass to the model.",
                 "list": false,
                 "load_from_db": false,
-                "multiline": true,
                 "name": "system_message",
                 "placeholder": "",
                 "required": false,
@@ -892,14 +915,14 @@
         },
         "dragging": false,
         "height": 623,
-        "id": "OpenAIModel-nNBA0",
+        "id": "OpenAIModel-uVOc5",
         "position": {
-          "x": 2505.576405157388,
-          "y": 662.147407935124
+          "x": 2495.6628431453228,
+          "y": 668.0955451423632
         },
         "positionAbsolute": {
-          "x": 2505.576405157388,
-          "y": 662.147407935124
+          "x": 2495.6628431453228,
+          "y": 668.0955451423632
         },
         "selected": false,
         "type": "genericNode",
@@ -907,7 +930,7 @@
       },
       {
         "data": {
-          "id": "ChatOutput-0wtla",
+          "id": "ChatOutput-R7jsA",
           "node": {
             "base_classes": [
               "Message"
@@ -1065,7 +1088,7 @@
         },
         "dragging": false,
         "height": 309,
-        "id": "ChatOutput-0wtla",
+        "id": "ChatOutput-R7jsA",
         "position": {
           "x": 3129.987101578166,
           "y": 888.0854888768531
@@ -1080,15 +1103,15 @@
       }
     ],
     "viewport": {
-      "x": 0,
-      "y": 0,
-      "zoom": 1
+      "x": -527.2609043386433,
+      "y": 33.26280492099636,
+      "zoom": 0.48650433790103115
     }
   },
   "description": "This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.",
-  "endpoint_name": "None-3",
-  "id": "fff22177-f477-4f68-a6f8-7972775cb38a",
+  "endpoint_name": null,
+  "id": "4e88f957-1541-4760-8a03-6132d4b14090",
   "is_component": false,
-  "last_tested_version": "1.0.0a59",
+  "last_tested_version": "1.0.0a61",
   "name": "Memory Chatbot"
 }
\ No newline at end of file