diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json index 39c50244c..66cd711e1 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json @@ -6,7 +6,7 @@ "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-GBI4i", + "id": "ChatInput-jbtaD", "name": "message", "output_types": [ "Message" @@ -14,7 +14,7 @@ }, "targetHandle": { "fieldName": "user_input", - "id": "Prompt-vrft7", + "id": "Prompt-0SBd6", "inputTypes": [ "Message", "Text" @@ -22,18 +22,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-GBI4i{œdataTypeœ:œChatInputœ,œidœ:œChatInput-GBI4iœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-vrft7{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-vrft7œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-GBI4i", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-GBI4iœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-vrft7", - "targetHandle": "{œfieldNameœ: œuser_inputœ, œidœ: œPrompt-vrft7œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-jbtaD{œdataTypeœ:œChatInputœ,œidœ:œChatInput-jbtaDœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-0SBd6{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-0SBd6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-jbtaD", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-jbtaDœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-0SBd6", + "targetHandle": "{œfieldNameœ: œuser_inputœ, œidœ: œPrompt-0SBd6œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-vrft7", + "id": "Prompt-0SBd6", "name": "prompt", "output_types": [ "Message" @@ -41,25 +41,25 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-Y1Uvp", + "id": "OpenAIModel-HBuxy", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-vrft7{œdataTypeœ:œPromptœ,œidœ:œPrompt-vrft7œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-Y1Uvp{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-Y1Uvpœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-vrft7", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-vrft7œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-Y1Uvp", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-Y1Uvpœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-0SBd6{œdataTypeœ:œPromptœ,œidœ:œPrompt-0SBd6œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-HBuxy{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-HBuxyœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-0SBd6", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-0SBd6œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-HBuxy", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-HBuxyœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "OpenAIModel", - "id": "OpenAIModel-Y1Uvp", + "id": "OpenAIModel-HBuxy", "name": "text_output", "output_types": [ "Message" @@ -67,18 +67,18 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-QRfK5", + "id": 
"ChatOutput-WG5tg", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-Y1Uvp{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Y1Uvpœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-QRfK5{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-QRfK5œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-Y1Uvp", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-Y1Uvpœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-QRfK5", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-QRfK5œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-HBuxy{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-HBuxyœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-WG5tg{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-WG5tgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-HBuxy", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-HBuxyœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-WG5tg", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-WG5tgœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ @@ -86,7 +86,7 @@ "data": { "description": "Get chat inputs from the Playground.", "display_name": "Chat Input", - "id": "ChatInput-GBI4i", + "id": "ChatInput-jbtaD", "node": { "base_classes": [ "Message" @@ -100,6 +100,7 @@ "edited": false, "field_order": [ "input_value", + "store_message", "sender", "sender_name", "session_id", @@ -140,7 +141,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, 
MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -261,6 +262,21 @@ "trace_as_metadata": true, "type": "str", "value": "" + }, + "store_message": { + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true } } }, @@ -268,7 +284,7 @@ }, "dragging": false, "height": 309, - "id": "ChatInput-GBI4i", + "id": "ChatInput-jbtaD", "position": { "x": -493.6459512396177, "y": 1083.200545525551 @@ -285,7 +301,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-vrft7", + "id": "Prompt-0SBd6", "node": { "base_classes": [ "Message" @@ -387,7 +403,7 @@ }, "dragging": false, "height": 423, - "id": "Prompt-vrft7", + "id": "Prompt-0SBd6", "position": { "x": 56.354011530798516, "y": 1157.2005405164796 @@ -404,7 +420,7 @@ "data": { "description": "Display a chat message in the Playground.", "display_name": "Chat Output", - "id": "ChatOutput-QRfK5", + "id": "ChatOutput-WG5tg", "node": { "base_classes": [ "Message" @@ -418,6 +434,7 @@ "edited": false, "field_order": [ "input_value", + "store_message", "sender", "sender_name", "session_id", @@ -458,7 +475,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n 
MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, @@ -557,6 +574,21 @@ "trace_as_metadata": true, "type": "str", "value": "" + }, + "store_message": { + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true } } }, @@ -564,7 +596,7 @@ }, "dragging": false, "height": 309, - "id": "ChatOutput-QRfK5", + "id": "ChatOutput-WG5tg", "position": { "x": 1219.477374823274, "y": 1200.950216973985 @@ -581,7 +613,7 @@ "data": { "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", - "id": "OpenAIModel-Y1Uvp", + "id": "OpenAIModel-HBuxy", "node": { "base_classes": [ "LanguageModel", @@ -857,7 +889,7 @@ }, "dragging": false, "height": 623, - "id": "OpenAIModel-Y1Uvp", + "id": "OpenAIModel-HBuxy", "position": { "x": 664.0296638933031, "y": 1026.5966174731725 @@ -872,15 +904,15 @@ } ], "viewport": { - "x": 283.5782991180689, - "y": -288.9547032063149, - "zoom": 0.43809300423581504 + "x": 427.12410642709614, + "y": -361.39815091467085, + "zoom": 0.5562299357713679 } }, "description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. 
\nYou can change it to the following and connect a Text Input to the `type_of_person` variable: \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ", "endpoint_name": null, - "id": "f672da95-a7e3-41d2-942d-7ceabe1c0daa", + "id": "b0e19aab-2095-41ee-b91c-1168790cc68b", "is_component": false, - "last_tested_version": "1.0.7", + "last_tested_version": "1.0.9", "name": "Basic Prompting (Hello, World)" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json index 692423903..a189acd65 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json @@ -6,7 +6,7 @@ "data": { "sourceHandle": { "dataType": "URL", - "id": "URL-9mY8k", + "id": "URL-76lwY", "name": "data", "output_types": [ "Data" @@ -14,25 +14,25 @@ }, "targetHandle": { "fieldName": "data", - "id": "ParseData-qtEJg", + "id": "ParseData-jYhXf", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-URL-9mY8k{œdataTypeœ:œURLœ,œidœ:œURL-9mY8kœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-qtEJg{œfieldNameœ:œdataœ,œidœ:œParseData-qtEJgœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "URL-9mY8k", - "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-9mY8kœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-qtEJg", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-qtEJgœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-URL-76lwY{œdataTypeœ:œURLœ,œidœ:œURL-76lwYœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-jYhXf{œfieldNameœ:œdataœ,œidœ:œParseData-jYhXfœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "URL-76lwY", + "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-76lwYœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-jYhXf", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-jYhXfœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-qtEJg", + "id": "ParseData-jYhXf", "name": "text", "output_types": [ "Message" @@ -40,7 +40,7 @@ }, "targetHandle": { "fieldName": "references", - "id": "Prompt-LvBLt", + "id": "Prompt-ABI8S", "inputTypes": [ "Message", "Text" @@ -48,18 +48,18 @@ "type": "str" } }, - "id": "reactflow__edge-ParseData-qtEJg{œdataTypeœ:œParseDataœ,œidœ:œParseData-qtEJgœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-LvBLt{œfieldNameœ:œreferencesœ,œidœ:œPrompt-LvBLtœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-qtEJg", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-qtEJgœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-LvBLt", - "targetHandle": "{œfieldNameœ: œreferencesœ, œidœ: œPrompt-LvBLtœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-jYhXf{œdataTypeœ:œParseDataœ,œidœ:œParseData-jYhXfœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-ABI8S{œfieldNameœ:œreferencesœ,œidœ:œPrompt-ABI8Sœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-jYhXf", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-jYhXfœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-ABI8S", + "targetHandle": "{œfieldNameœ: œreferencesœ, œidœ: œPrompt-ABI8Sœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "TextInput", -
"id": "TextInput-uN09a", + "id": "TextInput-jiXJB", "name": "text", "output_types": [ "Message" @@ -67,7 +67,7 @@ }, "targetHandle": { "fieldName": "instructions", - "id": "Prompt-LvBLt", + "id": "Prompt-ABI8S", "inputTypes": [ "Message", "Text" @@ -75,18 +75,18 @@ "type": "str" } }, - "id": "reactflow__edge-TextInput-uN09a{œdataTypeœ:œTextInputœ,œidœ:œTextInput-uN09aœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-LvBLt{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-LvBLtœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "TextInput-uN09a", - "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-uN09aœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-LvBLt", - "targetHandle": "{œfieldNameœ: œinstructionsœ, œidœ: œPrompt-LvBLtœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-TextInput-jiXJB{œdataTypeœ:œTextInputœ,œidœ:œTextInput-jiXJBœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-ABI8S{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-ABI8Sœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-jiXJB", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-jiXJBœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-ABI8S", + "targetHandle": "{œfieldNameœ: œinstructionsœ, œidœ: œPrompt-ABI8Sœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-LvBLt", + "id": "Prompt-ABI8S", "name": "prompt", "output_types": [ "Message" @@ -94,25 +94,25 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-5Kl3e", + "id": "OpenAIModel-JBO2p", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-LvBLt{œdataTypeœ:œPromptœ,œidœ:œPrompt-LvBLtœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-5Kl3e{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-5Kl3eœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-LvBLt", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-LvBLtœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-5Kl3e", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-5Kl3eœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-ABI8S{œdataTypeœ:œPromptœ,œidœ:œPrompt-ABI8Sœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-JBO2p{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-JBO2pœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-ABI8S", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-ABI8Sœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-JBO2p", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-JBO2pœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "OpenAIModel", - "id": "OpenAIModel-5Kl3e", + "id": "OpenAIModel-JBO2p", "name": "text_output", "output_types": [ "Message" @@ -120,18 +120,18 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-HyN2e", + "id": "ChatOutput-uaX6T", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-5Kl3e{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-5Kl3eœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-HyN2e{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-HyN2eœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-5Kl3e", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-5Kl3eœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-HyN2e", - 
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-HyN2eœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-JBO2p{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-JBO2pœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-uaX6T{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-uaX6Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-JBO2p", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-JBO2pœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-uaX6T", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-uaX6Tœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ @@ -139,7 +139,7 @@ "data": { "description": "Fetch content from one or more URLs.", "display_name": "URL", - "id": "URL-9mY8k", + "id": "URL-76lwY", "node": { "base_classes": [ "Data" @@ -220,7 +220,7 @@ }, "dragging": false, "height": 359, - "id": "URL-9mY8k", + "id": "URL-76lwY", "position": { "x": 220.79156431407534, "y": 498.8186168722667 @@ -237,7 +237,7 @@ "data": { "description": "Convert Data into plain text following a specified template.", "display_name": "Parse Data", - "id": "ParseData-qtEJg", + "id": "ParseData-jYhXf", "node": { "base_classes": [ "Message" @@ -353,7 +353,7 @@ }, "dragging": false, "height": 385, - "id": "ParseData-qtEJg", + "id": "ParseData-jYhXf", "position": { "x": 754.3607306709101, "y": 736.8516961537598 @@ -370,7 +370,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-LvBLt", + "id": "Prompt-ABI8S", "node": { "base_classes": [ "Message" @@ -497,7 +497,7 @@ }, "dragging": false, "height": 517, - "id": "Prompt-LvBLt", + "id": "Prompt-ABI8S", "position": { "x": 1368.0633591447076, "y": 467.19448061224284 @@ -514,7 +514,7 @@ "data": { "description": "Get text inputs from the Playground.", "display_name": "Instructions", - "id": "TextInput-uN09a", + "id": "TextInput-jiXJB", "node": { "base_classes": [ "Message" @@ -592,7 +592,7 @@ }, "dragging": false, "height": 309, - "id": "TextInput-uN09a", + "id": "TextInput-jiXJB", "position": { "x": 743.7338453293725, "y": 301.58775454952183 @@ -609,7 +609,7 @@ "data": { "description": "Display a chat message in the Playground.", "display_name": "Chat Output", - "id": "ChatOutput-HyN2e", + "id": "ChatOutput-uaX6T", "node": { "base_classes": [ "Message" @@ -623,6 +623,7 @@ "edited": false, "field_order": [ "input_value", + "store_message", "sender", "sender_name", "session_id", @@ -663,7 +664,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n 
MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, @@ -762,6 +763,21 @@ "trace_as_metadata": true, "type": "str", "value": "" + }, + "store_message": { + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true } } }, @@ -769,7 +785,7 @@ }, "dragging": false, "height": 309, - "id": "ChatOutput-HyN2e", + "id": "ChatOutput-uaX6T", "position": { "x": 2449.3489426461606, "y": 571.2449700910389 @@ -786,7 +802,7 @@ "data": { "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", - "id": "OpenAIModel-5Kl3e", + "id": "OpenAIModel-JBO2p", "node": { "base_classes": [ "LanguageModel", @@ -1062,7 +1078,7 @@ }, "dragging": false, "height": 623, - "id": "OpenAIModel-5Kl3e", + "id": "OpenAIModel-JBO2p", "position": { "x": 1950.3830456413473, "y": 380.8161704718418 @@ -1077,15 +1093,15 @@ } ], "viewport": { - "x": 13.218836373936256, - "y": 51.81084733415963, - "zoom": 0.3400188154942784 + "x": -52.959712994147594, + "y": 41.95510708899229, + "zoom": 0.5873729194514925 } }, "description": "This flow can be used to create a blog post following instructions from the user, using two other blogs as reference.", "endpoint_name": null, - "id": "5572261c-afd4-41d1-971e-12b3b7577a24", + "id": "6b576678-66cd-4d6e-ab40-af1104f02c37", "is_component": false, - "last_tested_version": "1.0.7", + "last_tested_version": "1.0.9", "name": "Blog Writer" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json index c5ea61093..763af84ba 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json @@ -6,7 +6,7 @@ "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-0DUC7", + "id": "ChatInput-Emi4q", "name": "message", "output_types": [ "Message" @@ -14,7 +14,7 @@ }, "targetHandle": { "fieldName": "Question", - "id": "Prompt-LciEH", + "id": "Prompt-n8yRL", "inputTypes": [ "Message", "Text" @@ -22,18 +22,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-0DUC7{œdataTypeœ:œChatInputœ,œidœ:œChatInput-0DUC7œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-LciEH{œfieldNameœ:œQuestionœ,œidœ:œPrompt-LciEHœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-0DUC7", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-0DUC7œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-LciEH", - "targetHandle": "{œfieldNameœ: œQuestionœ, œidœ: œPrompt-LciEHœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": 
"reactflow__edge-ChatInput-Emi4q{œdataTypeœ:œChatInputœ,œidœ:œChatInput-Emi4qœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-n8yRL{œfieldNameœ:œQuestionœ,œidœ:œPrompt-n8yRLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-Emi4q", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-Emi4qœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-n8yRL", + "targetHandle": "{œfieldNameœ: œQuestionœ, œidœ: œPrompt-n8yRLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-LciEH", + "id": "Prompt-n8yRL", "name": "prompt", "output_types": [ "Message" @@ -41,25 +41,25 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-ip5dm", + "id": "OpenAIModel-1hwZ2", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-LciEH{œdataTypeœ:œPromptœ,œidœ:œPrompt-LciEHœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-ip5dm{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-ip5dmœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-LciEH", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-LciEHœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-ip5dm", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-ip5dmœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-n8yRL{œdataTypeœ:œPromptœ,œidœ:œPrompt-n8yRLœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-1hwZ2{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-1hwZ2œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-n8yRL", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-n8yRLœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-1hwZ2", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-1hwZ2œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "OpenAIModel", - "id": "OpenAIModel-ip5dm", + "id": "OpenAIModel-1hwZ2", "name": "text_output", "output_types": [ "Message" @@ -67,25 +67,25 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-IP9p0", + "id": "ChatOutput-sD0lp", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-ip5dm{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-ip5dmœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-IP9p0{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-IP9p0œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-ip5dm", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-ip5dmœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-IP9p0", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-IP9p0œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-1hwZ2{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-1hwZ2œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-sD0lp{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-sD0lpœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-1hwZ2", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-1hwZ2œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-sD0lp", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-sD0lpœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-FchNf", + "id": "ParseData-qYLes", "name": "text", "output_types": 
[ "Message" @@ -93,7 +93,7 @@ }, "targetHandle": { "fieldName": "Document", - "id": "Prompt-LciEH", + "id": "Prompt-n8yRL", "inputTypes": [ "Message", "Text" @@ -101,18 +101,18 @@ "type": "str" } }, - "id": "reactflow__edge-ParseData-FchNf{œdataTypeœ:œParseDataœ,œidœ:œParseData-FchNfœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-LciEH{œfieldNameœ:œDocumentœ,œidœ:œPrompt-LciEHœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-FchNf", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-FchNfœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-LciEH", - "targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-LciEHœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-qYLes{œdataTypeœ:œParseDataœ,œidœ:œParseData-qYLesœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-n8yRL{œfieldNameœ:œDocumentœ,œidœ:œPrompt-n8yRLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-qYLes", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-qYLesœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-n8yRL", + "targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-n8yRLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "File", - "id": "File-2kzl4", + "id": "File-0oa6O", "name": "data", "output_types": [ "Data" @@ -120,18 +120,18 @@ }, "targetHandle": { "fieldName": "data", - "id": "ParseData-FchNf", + "id": "ParseData-qYLes", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-File-2kzl4{œdataTypeœ:œFileœ,œidœ:œFile-2kzl4œ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-FchNf{œfieldNameœ:œdataœ,œidœ:œParseData-FchNfœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "File-2kzl4", - "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-2kzl4œ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-FchNf", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-FchNfœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-File-0oa6O{œdataTypeœ:œFileœ,œidœ:œFile-0oa6Oœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-qYLes{œfieldNameœ:œdataœ,œidœ:œParseData-qYLesœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "File-0oa6O", + "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-0oa6Oœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-qYLes", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-qYLesœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" } ], "nodes": [ @@ -139,7 +139,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-LciEH", + "id": "Prompt-n8yRL", "node": { "base_classes": [ "Message" @@ -266,7 +266,7 @@ }, "dragging": false, "height": 517, - "id": "Prompt-LciEH", + "id": "Prompt-n8yRL", "position": { "x": 637.3518652087848, "y": 47.191730368560215 @@ -283,7 +283,7 @@ "data": { "description": "Get chat inputs from the Playground.", "display_name": "Chat Input", - "id": "ChatInput-0DUC7", + "id": "ChatInput-Emi4q", "node": { "base_classes": [ "Message" @@ -297,6 +297,7 @@ "edited": false, "field_order": [ "input_value", + "store_message", "sender", "sender_name", "session_id", @@ -337,7 +338,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom 
langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -458,6 +459,21 @@ "trace_as_metadata": 
true, "type": "str", "value": "" + }, + "store_message": { + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true } } }, @@ -465,7 +481,7 @@ }, "dragging": false, "height": 309, - "id": "ChatInput-0DUC7", + "id": "ChatInput-Emi4q", "position": { "x": 50.08709924122684, "y": 320.88186720121615 @@ -482,7 +498,7 @@ "data": { "description": "Display a chat message in the Playground.", "display_name": "Chat Output", - "id": "ChatOutput-IP9p0", + "id": "ChatOutput-sD0lp", "node": { "base_classes": [ "Message" @@ -496,6 +512,7 @@ "edited": false, "field_order": [ "input_value", + "store_message", "sender", "sender_name", "session_id", @@ -536,7 +553,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, @@ -635,6 +652,21 @@ "trace_as_metadata": true, "type": "str", "value": "" + }, + "store_message": { + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true } } }, @@ -642,7 +674,7 @@ }, "dragging": false, "height": 309, - "id": "ChatOutput-IP9p0", + "id": "ChatOutput-sD0lp", "position": { "x": 1831.1359796346408, "y": 139.5174517327903 @@ -659,7 +691,7 @@ "data": { "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", - "id": "OpenAIModel-ip5dm", + "id": "OpenAIModel-1hwZ2", "node": { "base_classes": [ "LanguageModel", @@ -935,7 +967,7 @@ }, "dragging": false, "height": 623, - "id": "OpenAIModel-ip5dm", + "id": "OpenAIModel-1hwZ2", "position": { "x": 1264.0039093582332, "y": -67.93731748926709 @@ -952,7 +984,7 @@ "data": { "description": "Convert Data into plain text following a specified template.", "display_name": "Parse Data", - "id": "ParseData-FchNf", + "id": "ParseData-qYLes", "node": { "base_classes": [ "Message" @@ -1068,7 +1100,7 @@ }, "dragging": false, "height": 385, - "id": "ParseData-FchNf", + "id": "ParseData-qYLes", "position": { "x": 87.26129917199853, "y": -181.46350622708565 @@ -1085,7 +1117,7 @@ "data": { "description": "A generic file loader.", "display_name": "File", - "id": "File-2kzl4", + "id": "File-0oa6O", "node": { "base_classes": [ "Data" @@ -1162,7 +1194,7 @@ "ts", "tsx" ], - "file_path": "", + "file_path": "049e2133-b45d-44a3-906a-1e8be93d9d7b/Prompt Engineering Guide 3ff66c517bff423c863bbb1c0eb21be8.md", "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", "list": false, "name": "path", @@ -1195,7 +1227,7 @@ }, "dragging": false, "height": 301, - "id": "File-2kzl4", + "id": "File-0oa6O", "position": { "x": -462.90407701896845, "y": -316.82165433756165 @@ -1210,15 +1242,15 @@ } ], "viewport": { - "x": 188.5057346133326, - "y": 210.35779755360858, + "x": 338.5057346133326, + "y": 271.3577975536086, "zoom": 0.36856730432277524 } }, "description": "This flow integrates PDF reading with a language model to answer document-specific questions. 
Ideal for small-scale texts, it facilitates direct queries with immediate insights.", "endpoint_name": null, - "id": "cb65817a-dfdf-4df2-9bb0-16654bf3f3b3", + "id": "6fb449a7-563c-446e-82d2-36f3defb9a48", "is_component": false, - "last_tested_version": "1.0.7", + "last_tested_version": "1.0.9", "name": "Document QA" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json index ed6cde919..e9c1b085f 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json @@ -6,7 +6,7 @@ "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-NFUUk", + "id": "ChatInput-6yuNd", "name": "message", "output_types": [ "Message" @@ -14,7 +14,7 @@ }, "targetHandle": { "fieldName": "user_message", - "id": "Prompt-qeJau", + "id": "Prompt-tifRl", "inputTypes": [ "Message", "Text" @@ -22,18 +22,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-NFUUk{œdataTypeœ:œChatInputœ,œidœ:œChatInput-NFUUkœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-qeJau{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-qeJauœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-NFUUk", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-NFUUkœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-qeJau", - "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-qeJauœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-6yuNd{œdataTypeœ:œChatInputœ,œidœ:œChatInput-6yuNdœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-tifRl{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-tifRlœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-6yuNd", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-6yuNdœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-tifRl", + "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-tifRlœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-qeJau", + "id": "Prompt-tifRl", "name": "prompt", "output_types": [ "Message" @@ -41,25 +41,25 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-gGnwp", + "id": "OpenAIModel-ZIeE0", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-qeJau{œdataTypeœ:œPromptœ,œidœ:œPrompt-qeJauœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-gGnwp{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-gGnwpœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-qeJau", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-qeJauœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-gGnwp", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-gGnwpœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-tifRl{œdataTypeœ:œPromptœ,œidœ:œPrompt-tifRlœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-ZIeE0{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-ZIeE0œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-tifRl", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-tifRlœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-ZIeE0", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-ZIeE0œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { 
"className": "", "data": { "sourceHandle": { "dataType": "OpenAIModel", - "id": "OpenAIModel-gGnwp", + "id": "OpenAIModel-ZIeE0", "name": "text_output", "output_types": [ "Message" @@ -67,25 +67,25 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-yJml1", + "id": "ChatOutput-c3v9q", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-gGnwp{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-gGnwpœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-yJml1{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-yJml1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-gGnwp", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-gGnwpœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-yJml1", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-yJml1œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-ZIeE0{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-ZIeE0œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-c3v9q{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-c3v9qœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-ZIeE0", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-ZIeE0œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-c3v9q", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-c3v9qœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "Memory", - "id": "Memory-1cBNz", + "id": "Memory-6s5g1", "name": "messages_text", "output_types": [ "Message" @@ -93,7 +93,7 @@ }, "targetHandle": { "fieldName": "context", - "id": "Prompt-qeJau", + "id": "Prompt-tifRl", "inputTypes": [ "Message", "Text" @@ -101,11 +101,11 @@ "type": "str" } }, - "id": "reactflow__edge-Memory-1cBNz{œdataTypeœ:œMemoryœ,œidœ:œMemory-1cBNzœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-qeJau{œfieldNameœ:œcontextœ,œidœ:œPrompt-qeJauœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "Memory-1cBNz", - "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-1cBNzœ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-qeJau", - "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-qeJauœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Memory-6s5g1{œdataTypeœ:œMemoryœ,œidœ:œMemory-6s5g1œ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-tifRl{œfieldNameœ:œcontextœ,œidœ:œPrompt-tifRlœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Memory-6s5g1", + "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-6s5g1œ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-tifRl", + "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-tifRlœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" } ], "nodes": [ @@ -113,7 +113,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-qeJau", + "id": "Prompt-tifRl", "node": { "base_classes": [ "Message" @@ -240,7 +240,7 @@ }, "dragging": false, "height": 517, - "id": "Prompt-qeJau", + "id": "Prompt-tifRl", "position": { "x": 1880.8227904110583, "y": 625.8049209882275 @@ -257,7 +257,7 @@ "data": { "description": "Get chat inputs from the Playground.", "display_name": "Chat Input", - "id": "ChatInput-NFUUk", + "id": "ChatInput-6yuNd", "node": { "base_classes": [ "Message" @@ -312,7 +312,7 @@ "show": true, "title_case": false, "type": "code", - 
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import HandleInput, BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def 
message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -435,7 +435,7 @@ "value": "" }, "store_message": { - "advanced": false, + "advanced": true, "display_name": "Store Messages", "dynamic": false, "info": "Store the message in the history.", @@ -454,8 +454,8 @@ "type": "ChatInput" }, "dragging": false, - "height": 385, - "id": "ChatInput-NFUUk", + "height": 309, + "id": "ChatInput-6yuNd", "position": { "x": 1275.9262193671882, "y": 836.1228056896347 @@ -472,7 +472,7 @@ "data": { "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", - "id": "OpenAIModel-gGnwp", + "id": "OpenAIModel-ZIeE0", "node": { "base_classes": [ "LanguageModel", @@ -748,7 +748,7 @@ }, "dragging": false, "height": 623, - "id": "OpenAIModel-gGnwp", + "id": "OpenAIModel-ZIeE0", "position": { "x": 2468.968379487559, "y": 560.0689522326683 @@ -757,7 +757,7 @@ "x": 2468.968379487559, "y": 560.0689522326683 }, - "selected": true, + "selected": false, "type": "genericNode", "width": 384 }, @@ -765,7 +765,7 @@ "data": { "description": "Display a chat message in the Playground.", "display_name": "Chat Output", - "id": "ChatOutput-yJml1", + "id": "ChatOutput-c3v9q", "node": { "base_classes": [ "Message" @@ -820,7 +820,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import HandleInput, BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, @@ -921,7 +921,7 @@ "value": "" }, "store_message": { - "advanced": false, + "advanced": true, "display_name": "Store Messages", "dynamic": false, "info": "Store the message in the history.", @@ -940,7 +940,7 @@ "type": "ChatOutput" }, "height": 385, - "id": "ChatOutput-yJml1", + "id": "ChatOutput-c3v9q", "position": { "x": 3083.1710516244116, "y": 701.521688846004 @@ -953,7 +953,7 @@ "data": { "description": "Retrieves stored chat messages from Langflow tables or an external memory.", "display_name": "Chat Memory", - "id": "Memory-1cBNz", + "id": "Memory-6s5g1", "node": { "base_classes": [ "BaseChatMemory", @@ -1033,7 +1033,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import List, Sequence\n\nfrom langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import get_messages, LCBuiltinChatMemory\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import BaseChatMemory\nfrom langchain.memory import ConversationBufferMemory\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\", \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"Session ID of the chat history.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n if sender:\n expected_type = \"Machine\" if sender == \"Machine\" else \"User\"\n stored = [m for m in stored if m.type == expected_type]\n if order == \"ASC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)" + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import get_messages, LCBuiltinChatMemory\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import BaseChatMemory\nfrom langchain.memory import ConversationBufferMemory\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\", \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"Session ID of the chat history.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n if sender:\n expected_type = \"Machine\" if sender == \"Machine\" else \"User\"\n stored = [m for m in stored if m.type == expected_type]\n if order == \"ASC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n" }, "memory": { "advanced": false, @@ -1172,7 +1172,7 @@ }, "dragging": false, "height": 387, - "id": "Memory-1cBNz", + "id": "Memory-6s5g1", "position": { "x": 1301.98330242754, "y": 422.33865605652574 @@ -1194,8 +1194,8 @@ }, "description": "This project can be used as a starting point for building a Chat experience with user specific memory. 
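[editor's note: annotation, not part of the diff] The rewritten Memory component above now prefers an attached external BaseChatMessageHistory over Langflow's own tables. A condensed sketch of that branch, paraphrased from the "+" value (illustrative names; not a drop-in component):

    # Sketch of the external-memory branch added to MemoryComponent above.
    # `memory` is a LangChain BaseChatMessageHistory; `sender` is None to
    # keep everything, or the expected message type to filter on.
    def retrieve_external(memory, session_id, sender, n_messages, order):
        memory.session_id = session_id      # override the session scope
        stored = memory.messages
        if sender:
            stored = [m for m in stored if m.type == sender]
        if order == "ASC":
            stored = stored[::-1]           # component flips for ascending
        if n_messages:
            stored = stored[:n_messages]
        return stored
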
You can set a different Session ID to start a new message history.", "endpoint_name": null, - "id": "9570a502-34bb-44bf-aa15-9f92e7d10efe", + "id": "ff6810d0-1d7b-4455-9b35-c9f54f7d63b6", "is_component": false, - "last_tested_version": "1.0.7", - "name": "memory_good" + "last_tested_version": "1.0.9", + "name": "Memory Chatbot" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json index d25cd8de7..36b8bea80 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json @@ -6,7 +6,7 @@ "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-0hHwK", + "id": "ChatInput-1Sa2a", "name": "message", "output_types": [ "Message" @@ -14,25 +14,25 @@ }, "targetHandle": { "fieldName": "search_input", - "id": "AstraDB-ekbDk", + "id": "AstraVectorStoreComponent-ANsbx", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-ChatInput-0hHwK{œdataTypeœ:œChatInputœ,œidœ:œChatInput-0hHwKœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-AstraDB-ekbDk{œfieldNameœ:œsearch_inputœ,œidœ:œAstraDB-ekbDkœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "ChatInput-0hHwK", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-0hHwKœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "AstraDB-ekbDk", - "targetHandle": "{œfieldNameœ: œsearch_inputœ, œidœ: œAstraDB-ekbDkœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-1Sa2a{œdataTypeœ:œChatInputœ,œidœ:œChatInput-1Sa2aœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-AstraVectorStoreComponent-ANsbx{œfieldNameœ:œsearch_inputœ,œidœ:œAstraVectorStoreComponent-ANsbxœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-1Sa2a", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-1Sa2aœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "AstraVectorStoreComponent-ANsbx", + "targetHandle": "{œfieldNameœ: œsearch_inputœ, œidœ: œAstraVectorStoreComponent-ANsbxœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-3kYvO", + "id": "ParseData-NJMcn", "name": "text", "output_types": [ "Message" @@ -40,7 +40,7 @@ }, "targetHandle": { "fieldName": "context", - "id": "Prompt-j0AtT", + "id": "Prompt-f6nr9", "inputTypes": [ "Message", "Text" @@ -48,18 +48,18 @@ "type": "str" } }, - "id": "reactflow__edge-ParseData-3kYvO{œdataTypeœ:œParseDataœ,œidœ:œParseData-3kYvOœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-j0AtT{œfieldNameœ:œcontextœ,œidœ:œPrompt-j0AtTœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-3kYvO", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-3kYvOœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-j0AtT", - "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-j0AtTœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-NJMcn{œdataTypeœ:œParseDataœ,œidœ:œParseData-NJMcnœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-f6nr9{œfieldNameœ:œcontextœ,œidœ:œPrompt-f6nr9œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-NJMcn", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-NJMcnœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-f6nr9", + "targetHandle": "{œfieldNameœ: 
œcontextœ, œidœ: œPrompt-f6nr9œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-0hHwK", + "id": "ChatInput-1Sa2a", "name": "message", "output_types": [ "Message" @@ -67,7 +67,7 @@ }, "targetHandle": { "fieldName": "question", - "id": "Prompt-j0AtT", + "id": "Prompt-f6nr9", "inputTypes": [ "Message", "Text" @@ -75,18 +75,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-0hHwK{œdataTypeœ:œChatInputœ,œidœ:œChatInput-0hHwKœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-j0AtT{œfieldNameœ:œquestionœ,œidœ:œPrompt-j0AtTœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-0hHwK", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-0hHwKœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-j0AtT", - "targetHandle": "{œfieldNameœ: œquestionœ, œidœ: œPrompt-j0AtTœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-1Sa2a{œdataTypeœ:œChatInputœ,œidœ:œChatInput-1Sa2aœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-f6nr9{œfieldNameœ:œquestionœ,œidœ:œPrompt-f6nr9œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-1Sa2a", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-1Sa2aœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-f6nr9", + "targetHandle": "{œfieldNameœ: œquestionœ, œidœ: œPrompt-f6nr9œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "File", - "id": "File-fxVTd", + "id": "File-vPaII", "name": "data", "output_types": [ "Data" @@ -94,25 +94,25 @@ }, "targetHandle": { "fieldName": "data_inputs", - "id": "SplitText-IUHHm", + "id": "SplitText-GMvuX", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-File-fxVTd{œdataTypeœ:œFileœ,œidœ:œFile-fxVTdœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-IUHHm{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-IUHHmœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "File-fxVTd", - "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-fxVTdœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "SplitText-IUHHm", - "targetHandle": "{œfieldNameœ: œdata_inputsœ, œidœ: œSplitText-IUHHmœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-File-vPaII{œdataTypeœ:œFileœ,œidœ:œFile-vPaIIœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-GMvuX{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-GMvuXœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "File-vPaII", + "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-vPaIIœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "SplitText-GMvuX", + "targetHandle": "{œfieldNameœ: œdata_inputsœ, œidœ: œSplitText-GMvuXœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "SplitText", - "id": "SplitText-IUHHm", + "id": "SplitText-GMvuX", "name": "chunks", "output_types": [ "Data" @@ -120,25 +120,25 @@ }, "targetHandle": { "fieldName": "ingest_data", - "id": "AstraDB-53Vs1", + "id": "AstraVectorStoreComponent-sQo90", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-SplitText-IUHHm{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-IUHHmœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-AstraDB-53Vs1{œfieldNameœ:œingest_dataœ,œidœ:œAstraDB-53Vs1œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "SplitText-IUHHm", - "sourceHandle": "{œdataTypeœ: œSplitTextœ, œidœ: œSplitText-IUHHmœ, œnameœ: œchunksœ, œoutput_typesœ: [œDataœ]}", 
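[editor's note: annotation, not part of the diff] On the œ…œ strings in these edge hunks: each handle value is a JSON object serialized with every double quote replaced by "œ", so renaming a node ID (AstraDB-53Vs1 to AstraVectorStoreComponent-sQo90) forces the edge "id", "sourceHandle", and "targetHandle" strings to be regenerated wholesale; the hunks are therefore larger than the underlying change. A sketch of the apparent encoding (the exact helper Langflow uses is not shown in this diff):

    import json

    # Reproduce the surface form visible above: JSON with '"' -> 'œ'.
    def encode_handle(handle: dict) -> str:
        return json.dumps(handle).replace('"', "œ")

    # Illustrative only, mirroring the SplitText source handle in this hunk:
    print(encode_handle({"dataType": "SplitText", "id": "SplitText-GMvuX",
                         "name": "chunks", "output_types": ["Data"]}))
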
- "target": "AstraDB-53Vs1", - "targetHandle": "{œfieldNameœ: œingest_dataœ, œidœ: œAstraDB-53Vs1œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-SplitText-GMvuX{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-GMvuXœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-AstraVectorStoreComponent-sQo90{œfieldNameœ:œingest_dataœ,œidœ:œAstraVectorStoreComponent-sQo90œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "SplitText-GMvuX", + "sourceHandle": "{œdataTypeœ: œSplitTextœ, œidœ: œSplitText-GMvuXœ, œnameœ: œchunksœ, œoutput_typesœ: [œDataœ]}", + "target": "AstraVectorStoreComponent-sQo90", + "targetHandle": "{œfieldNameœ: œingest_dataœ, œidœ: œAstraVectorStoreComponent-sQo90œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-lH0Wv", + "id": "OpenAIEmbeddings-2Vcb5", "name": "embeddings", "output_types": [ "Embeddings" @@ -146,7 +146,7 @@ }, "targetHandle": { "fieldName": "embedding", - "id": "AstraDB-53Vs1", + "id": "AstraVectorStoreComponent-sQo90", "inputTypes": [ "Embeddings", "dict" @@ -154,18 +154,18 @@ "type": "other" } }, - "id": "reactflow__edge-OpenAIEmbeddings-lH0Wv{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-lH0Wvœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-53Vs1{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-53Vs1œ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", - "source": "OpenAIEmbeddings-lH0Wv", - "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-lH0Wvœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", - "target": "AstraDB-53Vs1", - "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-53Vs1œ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-OpenAIEmbeddings-2Vcb5{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-2Vcb5œ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraVectorStoreComponent-sQo90{œfieldNameœ:œembeddingœ,œidœ:œAstraVectorStoreComponent-sQo90œ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", + "source": "OpenAIEmbeddings-2Vcb5", + "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-2Vcb5œ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", + "target": "AstraVectorStoreComponent-sQo90", + "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraVectorStoreComponent-sQo90œ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" }, { "className": "", "data": { "sourceHandle": { "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-yhWIS", + "id": "OpenAIEmbeddings-bKlZn", "name": "embeddings", "output_types": [ "Embeddings" @@ -173,7 +173,7 @@ }, "targetHandle": { "fieldName": "embedding", - "id": "AstraDB-ekbDk", + "id": "AstraVectorStoreComponent-ANsbx", "inputTypes": [ "Embeddings", "dict" @@ -181,17 +181,18 @@ "type": "other" } }, - "id": "reactflow__edge-OpenAIEmbeddings-yhWIS{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-yhWISœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-ekbDk{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-ekbDkœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", - "source": "OpenAIEmbeddings-yhWIS", - "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-yhWISœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", - "target": "AstraDB-ekbDk", - "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-ekbDkœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + "id": 
"reactflow__edge-OpenAIEmbeddings-bKlZn{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-bKlZnœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraVectorStoreComponent-ANsbx{œfieldNameœ:œembeddingœ,œidœ:œAstraVectorStoreComponent-ANsbxœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", + "source": "OpenAIEmbeddings-bKlZn", + "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-bKlZnœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", + "target": "AstraVectorStoreComponent-ANsbx", + "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraVectorStoreComponent-ANsbxœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-j0AtT", + "id": "Prompt-f6nr9", "name": "prompt", "output_types": [ "Message" @@ -199,24 +200,25 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-CaT8d", + "id": "OpenAIModel-jjdFc", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-j0AtT{œdataTypeœ:œPromptœ,œidœ:œPrompt-j0AtTœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-CaT8d{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-CaT8dœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-j0AtT", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-j0AtTœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-CaT8d", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-CaT8dœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-f6nr9{œdataTypeœ:œPromptœ,œidœ:œPrompt-f6nr9œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-jjdFc{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-jjdFcœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-f6nr9", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-f6nr9œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-jjdFc", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-jjdFcœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "OpenAIModel", - "id": "OpenAIModel-CaT8d", + "id": "OpenAIModel-jjdFc", "name": "text_output", "output_types": [ "Message" @@ -224,24 +226,26 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-5HURX", + "id": "ChatOutput-9ol1i", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-CaT8d{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-CaT8dœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-5HURX{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-5HURXœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-CaT8d", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-CaT8dœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-5HURX", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-5HURXœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-jjdFc{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-jjdFcœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-9ol1i{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-9ol1iœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-jjdFc", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-jjdFcœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-9ol1i", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-9ol1iœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], 
"nodes": [ { "data": { - "id": "ChatInput-0hHwK", + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-1Sa2a", "node": { "base_classes": [ "Message" @@ -255,6 +259,7 @@ "edited": false, "field_order": [ "input_value", + "store_message", "sender", "sender_name", "session_id", @@ -267,7 +272,6 @@ { "cache": true, "display_name": "Message", - "hidden": false, "method": "message_response", "name": "message", "selected": "Message", @@ -296,7 +300,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n 
display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -417,6 +421,21 @@ "trace_as_metadata": true, "type": "str", "value": "" + }, + "store_message": { + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true } } }, @@ -424,7 +443,7 @@ }, "dragging": false, "height": 309, - "id": "ChatInput-0hHwK", + "id": "ChatInput-1Sa2a", "position": { "x": 642.3545710150049, "y": 220.22556606238678 @@ -442,7 +461,7 @@ "description": "Implementation of Vector Store using Astra DB with search capabilities", "display_name": "Astra DB", "edited": false, - "id": "AstraDB-ekbDk", + "id": "AstraVectorStoreComponent-ANsbx", "node": { "base_classes": [ "Data", @@ -880,7 +899,7 @@ }, "dragging": false, "height": 755, - "id": "AstraDB-ekbDk", + "id": "AstraVectorStoreComponent-ANsbx", "position": { "x": 1246.0381406498648, "y": 333.25157075413966 @@ -895,7 +914,9 @@ }, { "data": { - "id": "ParseData-3kYvO", + "description": "Convert Data into plain text following a specified template.", + "display_name": "Parse Data", + "id": "ParseData-NJMcn", "node": { "base_classes": [ "Message" @@ -919,7 +940,6 @@ { "cache": true, "display_name": "Text", - "hidden": false, "method": "parse_data", "name": "text", "selected": "Message", @@ -948,7 +968,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" }, "data": { "advanced": false, @@ -1012,7 +1032,7 @@ }, "dragging": false, "height": 385, - "id": "ParseData-3kYvO", + "id": "ParseData-NJMcn", "position": { "x": 1854.1518317915907, "y": 459.3386924128532 @@ -1029,7 +1049,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-j0AtT", + "id": "Prompt-f6nr9", "node": { "base_classes": [ "Message" @@ -1046,23 +1066,16 @@ "display_name": "Prompt", "documentation": "", "edited": false, - "error": null, "field_order": [ "template" ], "frozen": false, - "full_path": null, "icon": "prompts", - "is_composition": null, - "is_input": null, - "is_output": null, - "name": "", "output_types": [], "outputs": [ { "cache": true, "display_name": "Prompt Message", - "hidden": false, "method": "build_prompt", "name": "prompt", "selected": "Message", @@ -1091,7 +1104,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_build_config: dict, current_build_config: 
dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_build_config, current_build_config)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_build_config\n # and update the frontend_node with those values\n update_template_values(frontend_template=frontend_node, raw_template=current_build_config[\"template\"])\n return frontend_node\n" + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_build_config: dict, current_build_config: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_build_config, current_build_config)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_build_config\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_build_config[\"template\"])\n return frontend_node\n" }, "context": { "advanced": false, @@ -1147,6 +1160,7 @@ "dynamic": false, "info": "", "list": false, + "load_from_db": false, "name": "template", "placeholder": "", "required": false, @@ -1162,7 +1176,7 @@ }, "dragging": false, "height": 517, - "id": "Prompt-j0AtT", + "id": "Prompt-f6nr9", "position": { "x": 2486.0988668404975, "y": 496.5120474157301 @@ -1177,7 +1191,9 @@ }, { "data": { - "id": "ChatOutput-5HURX", + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-9ol1i", "node": { "base_classes": [ "Message" @@ -1191,6 +1207,7 @@ "edited": false, "field_order": [ "input_value", + "store_message", "sender", "sender_name", "session_id", @@ -1231,7 +1248,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed 
as output.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, @@ -1330,6 +1347,21 @@ "trace_as_metadata": true, "type": "str", "value": "" + }, + "store_message": { + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true } } }, @@ -1337,7 +1369,7 @@ }, "dragging": false, "height": 309, - "id": "ChatOutput-5HURX", + "id": "ChatOutput-9ol1i", "position": { "x": 3769.242086248817, "y": 585.3403837062634 @@ -1352,7 +1384,9 @@ }, { "data": { - "id": "SplitText-IUHHm", + "description": "Split text into chunks based on specified criteria.", + "display_name": "Split Text", + "id": "SplitText-GMvuX", "node": { "base_classes": [ "Data" @@ -1377,7 +1411,6 @@ { "cache": true, "display_name": "Chunks", - "hidden": false, "method": "split_text", "name": "chunks", "selected": "Data", @@ -1436,7 +1469,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from typing import List\n\nfrom langchain_text_splitters import CharacterTextSplitter\n\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.utils.util import unescape_string\n\n\nclass SplitTextComponent(Component):\n display_name: str = \"Split Text\"\n description: str = \"Split text into chunks based on specified criteria.\"\n icon = \"scissors-line-dashed\"\n\n inputs = [\n HandleInput(\n name=\"data_inputs\",\n display_name=\"Data Inputs\",\n info=\"The data to split.\",\n input_types=[\"Data\"],\n is_list=True,\n ),\n IntInput(\n name=\"chunk_overlap\",\n display_name=\"Chunk Overlap\",\n info=\"Number of characters to overlap between chunks.\",\n value=200,\n ),\n IntInput(\n name=\"chunk_size\",\n display_name=\"Chunk Size\",\n info=\"The maximum number of characters in each chunk.\",\n value=1000,\n ),\n MessageTextInput(\n name=\"separator\",\n display_name=\"Separator\",\n info=\"The character to split on. 
Defaults to newline.\",\n value=\"\\n\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Chunks\", name=\"chunks\", method=\"split_text\"),\n ]\n\n def _docs_to_data(self, docs):\n data = []\n for doc in docs:\n data.append(Data(text=doc.page_content, data=doc.metadata))\n return data\n\n def split_text(self) -> List[Data]:\n separator = unescape_string(self.separator)\n\n documents = []\n for _input in self.data_inputs:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n\n splitter = CharacterTextSplitter(\n chunk_overlap=self.chunk_overlap,\n chunk_size=self.chunk_size,\n separator=separator,\n )\n docs = splitter.split_documents(documents)\n data = self._docs_to_data(docs)\n self.status = data\n return data\n" + "value": "from typing import List\n\nfrom langchain_text_splitters import CharacterTextSplitter\n\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.utils.util import unescape_string\n\n\nclass SplitTextComponent(Component):\n display_name: str = \"Split Text\"\n description: str = \"Split text into chunks based on specified criteria.\"\n icon = \"scissors-line-dashed\"\n name = \"SplitText\"\n\n inputs = [\n HandleInput(\n name=\"data_inputs\",\n display_name=\"Data Inputs\",\n info=\"The data to split.\",\n input_types=[\"Data\"],\n is_list=True,\n ),\n IntInput(\n name=\"chunk_overlap\",\n display_name=\"Chunk Overlap\",\n info=\"Number of characters to overlap between chunks.\",\n value=200,\n ),\n IntInput(\n name=\"chunk_size\",\n display_name=\"Chunk Size\",\n info=\"The maximum number of characters in each chunk.\",\n value=1000,\n ),\n MessageTextInput(\n name=\"separator\",\n display_name=\"Separator\",\n info=\"The character to split on. 
Defaults to newline.\",\n value=\"\\n\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Chunks\", name=\"chunks\", method=\"split_text\"),\n ]\n\n def _docs_to_data(self, docs):\n data = []\n for doc in docs:\n data.append(Data(text=doc.page_content, data=doc.metadata))\n return data\n\n def split_text(self) -> List[Data]:\n separator = unescape_string(self.separator)\n\n documents = []\n for _input in self.data_inputs:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n\n splitter = CharacterTextSplitter(\n chunk_overlap=self.chunk_overlap,\n chunk_size=self.chunk_size,\n separator=separator,\n )\n docs = splitter.split_documents(documents)\n data = self._docs_to_data(docs)\n self.status = data\n return data\n" }, "data_inputs": { "advanced": false, @@ -1482,7 +1515,7 @@ }, "dragging": false, "height": 557, - "id": "SplitText-IUHHm", + "id": "SplitText-GMvuX", "position": { "x": 2044.2799160989089, "y": 1185.3130355818519 @@ -1497,7 +1530,9 @@ }, { "data": { - "id": "File-fxVTd", + "description": "A generic file loader.", + "display_name": "File", + "id": "File-vPaII", "node": { "base_classes": [ "Data" @@ -1520,7 +1555,6 @@ { "cache": true, "display_name": "Data", - "hidden": false, "method": "load_file", "name": "data", "selected": "Data", @@ -1549,7 +1583,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from pathlib import Path\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, Output\nfrom langflow.schema import Data\n\n\nclass FileComponent(Component):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=TEXT_FILE_TYPES,\n info=f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"load_file\"),\n ]\n\n def load_file(self) -> Data:\n if not self.path:\n raise ValueError(\"Please, upload a file to use this component.\")\n resolved_path = self.resolve_path(self.path)\n silent_errors = self.silent_errors\n\n extension = Path(resolved_path).suffix[1:].lower()\n\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. 
Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n\n data = parse_text_file_to_data(resolved_path, silent_errors)\n self.status = data if data else \"No data\"\n return data or Data()\n" + "value": "from pathlib import Path\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, Output\nfrom langflow.schema import Data\n\n\nclass FileComponent(Component):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n name = \"File\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=TEXT_FILE_TYPES,\n info=f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"load_file\"),\n ]\n\n def load_file(self) -> Data:\n if not self.path:\n raise ValueError(\"Please, upload a file to use this component.\")\n resolved_path = self.resolve_path(self.path)\n silent_errors = self.silent_errors\n\n extension = Path(resolved_path).suffix[1:].lower()\n\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n\n data = parse_text_file_to_data(resolved_path, silent_errors)\n self.status = data if data else \"No data\"\n return data or Data()\n" }, "path": { "advanced": false, @@ -1575,7 +1609,7 @@ "ts", "tsx" ], - "file_path": "14766ad5-1e23-462e-8f9e-2ed14f464506/BSCNCL97P18E730Y_F24 (1).pdf", + "file_path": "", "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", "list": false, "name": "path", @@ -1608,7 +1642,7 @@ }, "dragging": false, "height": 301, - "id": "File-fxVTd", + "id": "File-vPaII", "position": { "x": 1418.981990122179, "y": 1539.3825691184466 @@ -1626,7 +1660,7 @@ "description": "Implementation of Vector Store using Astra DB with search capabilities", "display_name": "Astra DB", "edited": false, - "id": "AstraDB-53Vs1", + "id": "AstraVectorStoreComponent-sQo90", "node": { "base_classes": [ "Data", @@ -2064,7 +2098,7 @@ }, "dragging": false, "height": 755, - "id": "AstraDB-53Vs1", + "id": "AstraVectorStoreComponent-sQo90", "position": { "x": 2678.506138892635, "y": 1267.3353646037478 @@ -2079,7 +2113,9 @@ }, { "data": { - "id": "OpenAIEmbeddings-lH0Wv", + "description": "Generate embeddings using OpenAI models.", + "display_name": "OpenAI Embeddings", + "id": "OpenAIEmbeddings-2Vcb5", "node": { "base_classes": [ "Embeddings" @@ -2111,7 +2147,8 @@ "show_progress_bar", "skip_empty", "tiktoken_model_name", - "tiktoken_enable" + "tiktoken_enable", + "dimensions" ], "frozen": false, "icon": "OpenAI", @@ -2120,7 +2157,6 @@ { "cache": true, "display_name": "Embeddings", - "hidden": false, "method": "build_embeddings", "name": "embeddings", "selected": "Embeddings", @@ -2184,7 +2220,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, 
SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" + "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, 
SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n name = \"OpenAIEmbeddings\"\n\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. 
Only supported by certain models.\",\n advanced=True,\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n dimensions=self.dimensions or None,\n )\n" }, "default_headers": { "advanced": true, @@ -2236,6 +2272,21 @@ "type": "str", "value": "" }, + "dimensions": { + "advanced": true, + "display_name": "Dimensions", + "dynamic": false, + "info": "The number of dimensions the resulting output embeddings should have. Only supported by certain models.", + "list": false, + "name": "dimensions", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, "embedding_ctx_length": { "advanced": true, "display_name": "Embedding Context Length", @@ -2306,7 +2357,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_base", "password": true, "placeholder": "", @@ -2338,7 +2389,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_type", "password": true, "placeholder": "", @@ -2494,7 +2545,7 @@ }, "dragging": false, "height": 395, - "id": "OpenAIEmbeddings-lH0Wv", + "id": "OpenAIEmbeddings-2Vcb5", "position": { "x": 2044.683126356786, "y": 1785.2283494456522 @@ -2509,7 +2560,9 @@ }, { "data": { - "id": "OpenAIEmbeddings-yhWIS", + "description": "Generate embeddings using OpenAI models.", + "display_name": "OpenAI Embeddings", + "id": "OpenAIEmbeddings-bKlZn", "node": { "base_classes": [ "Embeddings" @@ -2541,7 +2594,8 @@ "show_progress_bar", "skip_empty", "tiktoken_model_name", - "tiktoken_enable" + "tiktoken_enable", + "dimensions" ], "frozen": false, "icon": "OpenAI", @@ -2550,7 +2604,6 @@ { "cache": true, "display_name": "Embeddings", - "hidden": false, "method": "build_embeddings", "name": "embeddings", "selected": "Embeddings", @@ -2614,7 +2667,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n 
IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" + "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n name = \"OpenAIEmbeddings\"\n\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API 
request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. Only supported by certain models.\",\n advanced=True,\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n dimensions=self.dimensions or None,\n )\n" }, "default_headers": { "advanced": true, @@ -2666,6 +2719,21 @@ "type": "str", "value": "" }, + "dimensions": { + "advanced": true, + "display_name": "Dimensions", + "dynamic": false, + "info": "The number of dimensions the resulting output embeddings should have. 
Only supported by certain models.", + "list": false, + "name": "dimensions", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, "embedding_ctx_length": { "advanced": true, "display_name": "Embedding Context Length", @@ -2736,7 +2804,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_base", "password": true, "placeholder": "", @@ -2768,7 +2836,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_type", "password": true, "placeholder": "", @@ -2924,7 +2992,7 @@ }, "dragging": false, "height": 395, - "id": "OpenAIEmbeddings-yhWIS", + "id": "OpenAIEmbeddings-bKlZn", "position": { "x": 628.9252513328779, "y": 648.6750537749285 @@ -2939,7 +3007,9 @@ }, { "data": { - "id": "OpenAIModel-CaT8d", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-jjdFc", "node": { "base_classes": [ "LanguageModel", @@ -2973,7 +3043,6 @@ { "cache": true, "display_name": "Text", - "hidden": false, "method": "text_response", "name": "text_output", "selected": "Message", @@ -3013,7 +3082,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schea is a list of dictionarie s\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n        ),\n        DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n        BoolInput(\n            name=\"json_mode\",\n            display_name=\"JSON Mode\",\n            advanced=True,\n            info=\"If True, it will output JSON regardless of passing a schema.\",\n        ),\n        DictInput(\n            name=\"output_schema\",\n            is_list=True,\n            display_name=\"Schema\",\n            advanced=True,\n            info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n        ),\n        DropdownInput(\n            name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n        ),\n        StrInput(\n            name=\"openai_api_base\",\n            display_name=\"OpenAI API Base\",\n            advanced=True,\n            info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n        ),\n        SecretStrInput(\n            name=\"openai_api_key\",\n            display_name=\"OpenAI API Key\",\n            info=\"The OpenAI API Key to use for the OpenAI model.\",\n            advanced=False,\n            value=\"OPENAI_API_KEY\",\n        ),\n        FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n        BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n        StrInput(\n            name=\"system_message\",\n            display_name=\"System Message\",\n            info=\"System message to pass to the model.\",\n            advanced=True,\n        ),\n        IntInput(\n            name=\"seed\",\n            display_name=\"Seed\",\n            info=\"The seed controls the reproducibility of the job.\",\n            advanced=True,\n            value=1,\n        ),\n    ]\n\n    def build_model(self) -> LanguageModel:  # type: ignore[type-var]\n        # self.output_schema is a list of dictionaries\n        # let's convert it to a dictionary\n        output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n        openai_api_key = self.openai_api_key\n        temperature = self.temperature\n        model_name: str = self.model_name\n        max_tokens = self.max_tokens\n        model_kwargs = self.model_kwargs or {}\n        openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n        json_mode = bool(output_schema_dict) or self.json_mode\n        seed = self.seed\n        model_kwargs[\"seed\"] = seed\n\n        if openai_api_key:\n            api_key = SecretStr(openai_api_key)\n        else:\n            api_key = None\n        output = ChatOpenAI(\n            max_tokens=max_tokens or None,\n            model_kwargs=model_kwargs,\n            model=model_name,\n            base_url=openai_api_base,\n            api_key=api_key,\n            temperature=temperature or 0.1,\n        )\n        if json_mode:\n            if output_schema_dict:\n                output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")  # type: ignore\n            else:\n                output = output.bind(response_format={\"type\": \"json_object\"})  # type: ignore\n\n        return output  # type: ignore\n\n    def _get_exception_message(self, e: Exception):\n        \"\"\"\n        Get a message from an OpenAI exception.\n\n        Args:\n            e (Exception): The exception to get the message from.\n\n        Returns:\n            str: The message from the exception.\n        \"\"\"\n\n        try:\n            from openai import BadRequestError\n        except ImportError:\n            return\n        if isinstance(e, BadRequestError):\n            message = e.body.get(\"message\")  # type: ignore\n            if message:\n                return message\n        return\n" }, "input_value": { "advanced": false, @@ -3216,7 +3285,7 @@ }, "dragging": false, "height": 623, - "id": "OpenAIModel-CaT8d", + "id": "OpenAIModel-jjdFc", "position": { "x": 3138.7638747868177, "y": 413.0859233500825 @@ -3231,15 +3300,15 @@ } ], "viewport": { - "x": -209.6019287577517, - "y": -59.90908344462855, + "x": -598.6019287577517, + "y": -91.90908344462855, "zoom": 
0.49394144815132496 } }, "description": "Visit https://docs.langflow.org/tutorials/rag-with-astradb for a detailed guide to this project.\nThis project gives you both Ingestion and RAG in a single flow file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, generate your Token, and grab an API Endpoint.\nRunning this project requires you to add a file to the File component, then define a Collection Name and click on the Play icon on the Astra DB component.\n\nAfter the ingestion ends, you are ready to click on the Run button at the lower left corner and start asking questions about your data.", "endpoint_name": null, - "id": "14766ad5-1e23-462e-8f9e-2ed14f464506", + "id": "1538aa9c-e85c-4f98-a12b-25312f777991", "is_component": false, - "last_tested_version": "1.0.5", + "last_tested_version": "1.0.9", "name": "Vector Store RAG" } \ No newline at end of file