From b31c5e6b4a7afa7cddb48dcae41aa27f315a6cf7 Mon Sep 17 00:00:00 2001
From: Lucas Oliveira <62335616+lucaseduoli@users.noreply.github.com>
Date: Mon, 1 Jul 2024 13:31:06 -0300
Subject: [PATCH] Fix Global Variables (#2430)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore: update linting workflows to include dev branch in merge_group
* Update README.md Add 1.0 banner
* Update README.md
* chore: update package versions in pyproject.toml files
* Added db value to every onChange on parameters
* Refactored global variables to find variables on the first render, and to execute just one onChange to update both values and db
* Changed group recursion function to include check for global variables already applied or outdated
* Removed already inserted component check for default fields on global variables
* Fixed import error
* Added required parameters to update node on drop
* Removed check for unused hardcoded name
* Added global variables handling on nodes when adding a flow
* Fixed maximum update depth when deleting used global variable
* Fixed type error on addNewVariableButton modal
* Fixed openai api key on starter flows
* Fixed values to get the .env values by default on the starter projects
* Formatted flows
* fix errors reported by mypy
* [autofix.ci] apply automated fixes
* 🐛 (prototypes/__init__.py): fix missing comma in the list of imported components to prevent syntax error
* chore: Fix type hinting in ConditionalRouterComponent
* [autofix.ci] apply automated fixes

---------

Co-authored-by: Gabriel Luiz Freitas Almeida
Co-authored-by: Rodrigo Nader
Co-authored-by: italojohnny
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 .../components/embeddings/OpenAIEmbeddings.py | 2 +-
 .../Basic Prompting (Hello, World).json | 101 +-
 .../starter_projects/Blog Writer.json | 133 +-
 .../starter_projects/Document QA.json | 147 +-
 .../starter_projects/Memory Chatbot.json | 114 +-
 .../starter_projects/Vector Store RAG.json | 1178 +++++++++--------
 .../components/parameterComponent/index.tsx | 13 +-
 .../hooks/use-handle-new-value.tsx | 6 +-
 .../addNewVariableButton.tsx | 4 +-
 .../components/codeAreaComponent/index.tsx | 2 +-
 .../src/components/floatComponent/index.tsx | 2 +-
 .../components/inputFileComponent/index.tsx | 2 +-
 .../components/inputGlobalComponent/index.tsx | 41 +-
 .../src/components/intComponent/index.tsx | 2 +-
 .../src/components/promptComponent/index.tsx | 2 +-
 .../components/tableNodeCellRender/index.tsx | 30 +-
 .../components/textAreaComponent/index.tsx | 2 +-
 .../editNodeModal/hooks/use-column-defs.tsx | 4 +-
 .../src/modals/editNodeModal/index.tsx | 10 +-
 .../src/modals/newFlowModal/index.tsx | 6 -
 src/frontend/src/stores/flowStore.ts | 8 +-
 src/frontend/src/stores/flowsManagerStore.ts | 11 +
 .../globalVariablesStore/globalVariables.ts | 10 +-
 src/frontend/src/types/components/index.ts | 31 +-
 .../types/zustand/globalVariables/index.ts | 2 +-
 src/frontend/src/utils/reactflowUtils.ts | 58 +-
 26 files changed, 1036 insertions(+), 885 deletions(-)

diff --git a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
index 039caea24..7f56874be 100644
--- a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
@@ -40,7 +40,7 @@ class
OpenAIEmbeddingsComponent(LCEmbeddingsModel): ), DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True), SecretStrInput(name="openai_api_base", display_name="OpenAI API Base", advanced=True), - SecretStrInput(name="openai_api_key", display_name="OpenAI API Key"), + SecretStrInput(name="openai_api_key", display_name="OpenAI API Key", value="OPENAI_API_KEY"), SecretStrInput(name="openai_api_type", display_name="OpenAI API Type", advanced=True), MessageTextInput(name="openai_api_version", display_name="OpenAI API Version", advanced=True), MessageTextInput( diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json index 163384b2c..50f9385ac 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json @@ -2,10 +2,11 @@ "data": { "edges": [ { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-pxptT", + "id": "ChatInput-Y6mi1", "name": "message", "output_types": [ "Message" @@ -13,7 +14,7 @@ }, "targetHandle": { "fieldName": "user_input", - "id": "Prompt-1S5SU", + "id": "Prompt-Z4WYI", "inputTypes": [ "Message", "Text" @@ -21,17 +22,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-pxptT{œdataTypeœ:œChatInputœ,œidœ:œChatInput-pxptTœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-1S5SU{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-1S5SUœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-pxptT", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-pxptTœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-1S5SU", - "targetHandle": "{œfieldNameœ: œuser_inputœ, œidœ: œPrompt-1S5SUœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-Y6mi1{œdataTypeœ:œChatInputœ,œidœ:œChatInput-Y6mi1œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-Z4WYI{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-Z4WYIœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-Y6mi1", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-Y6mi1œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-Z4WYI", + "targetHandle": "{œfieldNameœ: œuser_inputœ, œidœ: œPrompt-Z4WYIœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-1S5SU", + "id": "Prompt-Z4WYI", "name": "prompt", "output_types": [ "Message" @@ -39,24 +41,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-nJXWj", + "id": "OpenAIModel-26Eve", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-1S5SU{œdataTypeœ:œPromptœ,œidœ:œPrompt-1S5SUœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-nJXWj{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-nJXWjœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-1S5SU", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-1S5SUœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-nJXWj", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-nJXWjœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": 
"reactflow__edge-Prompt-Z4WYI{œdataTypeœ:œPromptœ,œidœ:œPrompt-Z4WYIœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-26Eve{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-26Eveœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-Z4WYI", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-Z4WYIœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-26Eve", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-26Eveœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-nJXWj", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-26Eve", "name": "text_output", "output_types": [ "Message" @@ -64,24 +66,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-XP4bj", + "id": "ChatOutput-cQnVI", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-nJXWj{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-nJXWjœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-XP4bj{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-XP4bjœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-nJXWj", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-nJXWjœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-XP4bj", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-XP4bjœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-26Eve{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-26Eveœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-cQnVI{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-cQnVIœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-26Eve", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-26Eveœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-cQnVI", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-cQnVIœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "id": "ChatInput-pxptT", + "id": "ChatInput-Y6mi1", "node": { "base_classes": [ "Message" @@ -264,7 +266,7 @@ }, "dragging": false, "height": 308, - "id": "ChatInput-pxptT", + "id": "ChatInput-Y6mi1", "position": { "x": -493.6459512396177, "y": 1083.200545525551 @@ -281,7 +283,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-1S5SU", + "id": "Prompt-Z4WYI", "node": { "base_classes": [ "Message" @@ -389,7 +391,7 @@ }, "dragging": false, "height": 422, - "id": "Prompt-1S5SU", + "id": "Prompt-Z4WYI", "position": { "x": 56.354011530798516, "y": 1157.2005405164796 @@ -404,7 +406,10 @@ }, { "data": { - "id": "OpenAIModel-nJXWj", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-26Eve", "node": { "base_classes": [ "LanguageModel", @@ -416,11 +421,12 @@ "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -477,7 +483,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import 
LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai 
import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an 
OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -499,6 +505,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -572,7 +593,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -580,7 +601,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -660,11 +681,11 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-nJXWj", + "id": "OpenAIModel-26Eve", "position": { "x": 624.3539730827923, "y": 1053.2005475562555 @@ -679,7 +700,7 @@ }, { "data": { - "id": "ChatOutput-XP4bj", + "id": "ChatOutput-cQnVI", "node": { "base_classes": [ "Message" @@ -839,7 +860,7 @@ }, "dragging": false, "height": 308, - "id": "ChatOutput-XP4bj", + "id": "ChatOutput-cQnVI", "position": { "x": 1219.477374823274, "y": 1200.950216973985 @@ -854,15 +875,15 @@ } ], "viewport": { - "x": 392.1085223509972, - "y": -327.49805229761307, - "zoom": 0.5000000676901589 + "x": 366.93776265249005, + "y": -343.56726676261223, + "zoom": 0.5000000676901587 } }, "description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. 
\nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ", "endpoint_name": null, - "id": "f652abdc-7ef2-4e52-a00b-847b7aa32cee", + "id": "e533253b-818b-4b5a-9793-55ab83fffb07", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Basic Prompting (Hello, World)" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json index ec4a19aac..7828ac585 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json @@ -2,10 +2,11 @@ "data": { "edges": [ { + "className": "", "data": { "sourceHandle": { "dataType": "URL", - "id": "URL-k9NkE", + "id": "URL-rETJU", "name": "data", "output_types": [ "Data" @@ -13,24 +14,25 @@ }, "targetHandle": { "fieldName": "data", - "id": "ParseData-EwWXd", + "id": "ParseData-AqSfN", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-URL-k9NkE{œdataTypeœ:œURLœ,œidœ:œURL-k9NkEœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-EwWXd{œfieldNameœ:œdataœ,œidœ:œParseData-EwWXdœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "URL-k9NkE", - "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-k9NkEœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-EwWXd", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-EwWXdœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-URL-rETJU{œdataTypeœ:œURLœ,œidœ:œURL-rETJUœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-AqSfN{œfieldNameœ:œdataœ,œidœ:œParseData-AqSfNœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "URL-rETJU", + "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-rETJUœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-AqSfN", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-AqSfNœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-EwWXd", + "id": "ParseData-AqSfN", "name": "text", "output_types": [ "Message" @@ -38,7 +40,7 @@ }, "targetHandle": { "fieldName": "references", - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "inputTypes": [ "Message", "Text" @@ -46,17 +48,18 @@ "type": "str" } }, - "id": "reactflow__edge-ParseData-EwWXd{œdataTypeœ:œParseDataœ,œidœ:œParseData-EwWXdœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-B9Mq6{œfieldNameœ:œreferencesœ,œidœ:œPrompt-B9Mq6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-EwWXd", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-EwWXdœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-B9Mq6", - "targetHandle": "{œfieldNameœ: œreferencesœ, œidœ: œPrompt-B9Mq6œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-AqSfN{œdataTypeœ:œParseDataœ,œidœ:œParseData-AqSfNœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-rizUK{œfieldNameœ:œreferencesœ,œidœ:œPrompt-rizUKœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-AqSfN", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-AqSfNœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-rizUK", + "targetHandle": "{œfieldNameœ: œreferencesœ, œidœ: œPrompt-rizUKœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", 
"data": { "sourceHandle": { "dataType": "TextInput", - "id": "TextInput-uf6ij", + "id": "TextInput-OffFR", "name": "text", "output_types": [ "Message" @@ -64,7 +67,7 @@ }, "targetHandle": { "fieldName": "instructions", - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "inputTypes": [ "Message", "Text" @@ -72,17 +75,18 @@ "type": "str" } }, - "id": "reactflow__edge-TextInput-uf6ij{œdataTypeœ:œTextInputœ,œidœ:œTextInput-uf6ijœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-B9Mq6{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-B9Mq6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "TextInput-uf6ij", - "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-uf6ijœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-B9Mq6", - "targetHandle": "{œfieldNameœ: œinstructionsœ, œidœ: œPrompt-B9Mq6œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-TextInput-OffFR{œdataTypeœ:œTextInputœ,œidœ:œTextInput-OffFRœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-rizUK{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-rizUKœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-OffFR", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-OffFRœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-rizUK", + "targetHandle": "{œfieldNameœ: œinstructionsœ, œidœ: œPrompt-rizUKœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "name": "prompt", "output_types": [ "Message" @@ -90,24 +94,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-X9ukk", + "id": "OpenAIModel-qmhKV", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-B9Mq6{œdataTypeœ:œPromptœ,œidœ:œPrompt-B9Mq6œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-X9ukk{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-X9ukkœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-B9Mq6", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-B9Mq6œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-X9ukk", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-X9ukkœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-rizUK{œdataTypeœ:œPromptœ,œidœ:œPrompt-rizUKœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-qmhKV{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-qmhKVœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-rizUK", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-rizUKœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-qmhKV", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-qmhKVœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-X9ukk", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-qmhKV", "name": "text_output", "output_types": [ "Message" @@ -115,24 +119,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-5r5Iw", + "id": "ChatOutput-W684s", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-X9ukk{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-X9ukkœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-5r5Iw{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-5r5Iwœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-X9ukk", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-X9ukkœ, œnameœ: 
œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-5r5Iw", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-5r5Iwœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-qmhKV{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-qmhKVœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-W684s{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-W684sœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-qmhKV", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-qmhKVœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-W684s", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-W684sœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "id": "URL-k9NkE", + "id": "URL-rETJU", "node": { "base_classes": [ "Data" @@ -214,7 +218,7 @@ }, "dragging": false, "height": 358, - "id": "URL-k9NkE", + "id": "URL-rETJU", "position": { "x": 220.79156431407534, "y": 498.8186168722667 @@ -229,7 +233,7 @@ }, { "data": { - "id": "ParseData-EwWXd", + "id": "ParseData-AqSfN", "node": { "base_classes": [ "Message" @@ -346,7 +350,7 @@ }, "dragging": false, "height": 384, - "id": "ParseData-EwWXd", + "id": "ParseData-AqSfN", "position": { "x": 754.3607306709101, "y": 736.8516961537598 @@ -363,7 +367,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "node": { "base_classes": [ "Message" @@ -496,7 +500,7 @@ }, "dragging": false, "height": 515, - "id": "Prompt-B9Mq6", + "id": "Prompt-rizUK", "position": { "x": 1368.0633591447076, "y": 467.19448061224284 @@ -511,7 +515,7 @@ }, { "data": { - "id": "TextInput-uf6ij", + "id": "TextInput-OffFR", "node": { "base_classes": [ "Message" @@ -590,7 +594,7 @@ }, "dragging": false, "height": 308, - "id": "TextInput-uf6ij", + "id": "TextInput-OffFR", "position": { "x": 743.7338453293725, "y": 301.58775454952183 @@ -605,7 +609,10 @@ }, { "data": { - "id": "OpenAIModel-X9ukk", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-qmhKV", "node": { "base_classes": [ "LanguageModel", @@ -617,11 +624,12 @@ "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -678,7 +686,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", 
display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -700,6 +708,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + 
"name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -773,7 +796,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -781,7 +804,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -861,11 +884,11 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-X9ukk", + "id": "OpenAIModel-qmhKV", "position": { "x": 1899.407626221589, "y": 395.9013619556682 @@ -880,7 +903,7 @@ }, { "data": { - "id": "ChatOutput-5r5Iw", + "id": "ChatOutput-W684s", "node": { "base_classes": [ "Message" @@ -1040,7 +1063,7 @@ }, "dragging": false, "height": 308, - "id": "ChatOutput-5r5Iw", + "id": "ChatOutput-W684s", "position": { "x": 2449.3489426461606, "y": 571.2449700910389 @@ -1062,8 +1085,8 @@ }, "description": "This flow can be used to create a blog post following instructions from the user, using two other blogs as reference.", "endpoint_name": null, - "id": "13da3150-95b9-4d81-9ad2-f635dcdce7ab", + "id": "da9999f8-9013-4bd9-8adb-653c94ebf08c", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Blog Writer" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json index d66c6b22b..d0e7c53b0 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json @@ -2,10 +2,11 @@ "data": { "edges": [ { + "className": "", "data": { "sourceHandle": { "dataType": "File", - "id": "File-h46aK", + "id": "File-Q3Xrb", "name": "data", "output_types": [ "Data" @@ -13,24 +14,25 @@ }, "targetHandle": { "fieldName": "data", - "id": "ParseData-sqVr1", + "id": "ParseData-1Y5jJ", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-File-h46aK{œdataTypeœ:œFileœ,œidœ:œFile-h46aKœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-sqVr1{œfieldNameœ:œdataœ,œidœ:œParseData-sqVr1œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "File-h46aK", - "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-h46aKœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-sqVr1", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-sqVr1œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-File-Q3Xrb{œdataTypeœ:œFileœ,œidœ:œFile-Q3Xrbœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-1Y5jJ{œfieldNameœ:œdataœ,œidœ:œParseData-1Y5jJœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "File-Q3Xrb", + "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-Q3Xrbœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-1Y5jJ", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-1Y5jJœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-sqVr1", + "id": "ParseData-1Y5jJ", "name": "text", "output_types": [ "Message" @@ -38,7 +40,7 @@ }, "targetHandle": { "fieldName": "Document", - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", 
"inputTypes": [ "Message", "Text" @@ -46,17 +48,18 @@ "type": "str" } }, - "id": "reactflow__edge-ParseData-sqVr1{œdataTypeœ:œParseDataœ,œidœ:œParseData-sqVr1œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-mQ7w2{œfieldNameœ:œDocumentœ,œidœ:œPrompt-mQ7w2œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-sqVr1", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-sqVr1œ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-mQ7w2", - "targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-mQ7w2œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-1Y5jJ{œdataTypeœ:œParseDataœ,œidœ:œParseData-1Y5jJœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-CMJEB{œfieldNameœ:œDocumentœ,œidœ:œPrompt-CMJEBœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-1Y5jJ", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-1Y5jJœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-CMJEB", + "targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-CMJEBœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-cMXe0", + "id": "ChatInput-mc7sJ", "name": "message", "output_types": [ "Message" @@ -64,7 +67,7 @@ }, "targetHandle": { "fieldName": "Question", - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "inputTypes": [ "Message", "Text" @@ -72,17 +75,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-cMXe0{œdataTypeœ:œChatInputœ,œidœ:œChatInput-cMXe0œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-mQ7w2{œfieldNameœ:œQuestionœ,œidœ:œPrompt-mQ7w2œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-cMXe0", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-cMXe0œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-mQ7w2", - "targetHandle": "{œfieldNameœ: œQuestionœ, œidœ: œPrompt-mQ7w2œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-mc7sJ{œdataTypeœ:œChatInputœ,œidœ:œChatInput-mc7sJœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-CMJEB{œfieldNameœ:œQuestionœ,œidœ:œPrompt-CMJEBœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-mc7sJ", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-mc7sJœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-CMJEB", + "targetHandle": "{œfieldNameœ: œQuestionœ, œidœ: œPrompt-CMJEBœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "name": "prompt", "output_types": [ "Message" @@ -90,24 +94,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-O0AGC", + "id": "OpenAIModel-U2g5u", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-mQ7w2{œdataTypeœ:œPromptœ,œidœ:œPrompt-mQ7w2œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-O0AGC{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-O0AGCœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-mQ7w2", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-mQ7w2œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-O0AGC", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-O0AGCœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": 
"reactflow__edge-Prompt-CMJEB{œdataTypeœ:œPromptœ,œidœ:œPrompt-CMJEBœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-U2g5u{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-U2g5uœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-CMJEB", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-CMJEBœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-U2g5u", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-U2g5uœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-O0AGC", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-U2g5u", "name": "text_output", "output_types": [ "Message" @@ -115,24 +119,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-efggd", + "id": "ChatOutput-yZjPO", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-O0AGC{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-O0AGCœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-efggd{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-efggdœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-O0AGC", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-O0AGCœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-efggd", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-efggdœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-U2g5u{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-U2g5uœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-yZjPO{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-yZjPOœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-U2g5u", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-U2g5uœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-yZjPO", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-yZjPOœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "id": "File-h46aK", + "id": "File-Q3Xrb", "node": { "base_classes": [ "Data" @@ -243,7 +247,7 @@ }, "dragging": false, "height": 300, - "id": "File-h46aK", + "id": "File-Q3Xrb", "position": { "x": -449.0807503257012, "y": -253.5304920926106 @@ -258,7 +262,7 @@ }, { "data": { - "id": "ParseData-sqVr1", + "id": "ParseData-1Y5jJ", "node": { "base_classes": [ "Message" @@ -375,7 +379,7 @@ }, "dragging": false, "height": 384, - "id": "ParseData-sqVr1", + "id": "ParseData-1Y5jJ", "position": { "x": 73.79471204296345, "y": -186.9430114986888 @@ -392,7 +396,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "node": { "base_classes": [ "Message" @@ -525,7 +529,7 @@ }, "dragging": false, "height": 515, - "id": "Prompt-mQ7w2", + "id": "Prompt-CMJEB", "position": { "x": 637.3518652087848, "y": 47.191730368560215 @@ -540,7 +544,7 @@ }, { "data": { - "id": "ChatInput-cMXe0", + "id": "ChatInput-mc7sJ", "node": { "base_classes": [ "Message" @@ -723,7 +727,7 @@ }, "dragging": false, "height": 308, - "id": "ChatInput-cMXe0", + "id": "ChatInput-mc7sJ", "position": { "x": 50.08709924122684, "y": 320.88186720121615 @@ -738,7 +742,10 @@ }, { "data": { - "id": "OpenAIModel-O0AGC", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-U2g5u", "node": { "base_classes": [ "LanguageModel", @@ -750,11 +757,12 @@ 
"description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -811,7 +819,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -833,6 +841,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -906,7 +929,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -914,7 +937,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -994,26 
+1017,26 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-O0AGC", + "id": "OpenAIModel-U2g5u", "position": { - "x": 1227.3672858178775, - "y": 11.61201090144857 + "x": 1249.1992451905348, + "y": 2.8792271523856243 }, "positionAbsolute": { - "x": 1227.3672858178775, - "y": 11.61201090144857 + "x": 1249.1992451905348, + "y": 2.8792271523856243 }, - "selected": false, + "selected": true, "type": "genericNode", "width": 384 }, { "data": { - "id": "ChatOutput-efggd", + "id": "ChatOutput-yZjPO", "node": { "base_classes": [ "Message" @@ -1173,7 +1196,7 @@ }, "dragging": false, "height": 308, - "id": "ChatOutput-efggd", + "id": "ChatOutput-yZjPO", "position": { "x": 1831.1359796346408, "y": 139.5174517327903 @@ -1188,15 +1211,15 @@ } ], "viewport": { - "x": 249.03047748371796, - "y": 251.71203687916693, + "x": 252.03047748371796, + "y": 253.71203687916693, "zoom": 0.4580440916596844 } }, "description": "This flow integrates PDF reading with a language model to answer document-specific questions. Ideal for small-scale texts, it facilitates direct queries with immediate insights.", "endpoint_name": null, - "id": "4b4cbf9e-34fe-4613-a460-3b7af89b7788", + "id": "483d5200-b59b-4afa-a71f-52fcfcde8fca", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Document QA" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json index c3396f03e..1edb56cd5 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json @@ -2,10 +2,11 @@ "data": { "edges": [ { + "className": "", "data": { "sourceHandle": { "dataType": "Memory", - "id": "Memory-uy2TA", + "id": "Memory-VIq7F", "name": "messages_text", "output_types": [ "Message" @@ -13,7 +14,7 @@ }, "targetHandle": { "fieldName": "context", - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "inputTypes": [ "Message", "Text" @@ -21,17 +22,18 @@ "type": "str" } }, - "id": "reactflow__edge-Memory-uy2TA{œdataTypeœ:œMemoryœ,œidœ:œMemory-uy2TAœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-m9rUs{œfieldNameœ:œcontextœ,œidœ:œPrompt-m9rUsœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "Memory-uy2TA", - "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-uy2TAœ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-m9rUs", - "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-m9rUsœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Memory-VIq7F{œdataTypeœ:œMemoryœ,œidœ:œMemory-VIq7Fœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-gEaWL{œfieldNameœ:œcontextœ,œidœ:œPrompt-gEaWLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Memory-VIq7F", + "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-VIq7Fœ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-gEaWL", + "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-gEaWLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-hSTqh", + "id": "ChatInput-gIy9N", "name": "message", "output_types": [ "Message" @@ -39,7 +41,7 @@ }, "targetHandle": { "fieldName": "user_message", - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "inputTypes": [ 
"Message", "Text" @@ -47,17 +49,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-hSTqh{œdataTypeœ:œChatInputœ,œidœ:œChatInput-hSTqhœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-m9rUs{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-m9rUsœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-hSTqh", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-hSTqhœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-m9rUs", - "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-m9rUsœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-gIy9N{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gIy9Nœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-gEaWL{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-gEaWLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-gIy9N", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-gIy9Nœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-gEaWL", + "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-gEaWLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "name": "prompt", "output_types": [ "Message" @@ -65,24 +68,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-WmUtU", + "id": "OpenAIModel-uNcAU", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-m9rUs{œdataTypeœ:œPromptœ,œidœ:œPrompt-m9rUsœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-WmUtU{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-WmUtUœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-m9rUs", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-m9rUsœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-WmUtU", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-WmUtUœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-gEaWL{œdataTypeœ:œPromptœ,œidœ:œPrompt-gEaWLœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-uNcAU{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-uNcAUœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-gEaWL", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-gEaWLœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-uNcAU", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-uNcAUœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-WmUtU", + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-uNcAU", "name": "text_output", "output_types": [ "Message" @@ -90,24 +93,24 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-LIvGN", + "id": "ChatOutput-KtSB9", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-WmUtU{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-WmUtUœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-LIvGN{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-LIvGNœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-WmUtU", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-WmUtUœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-LIvGN", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-LIvGNœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": 
"reactflow__edge-OpenAIModel-uNcAU{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-uNcAUœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-KtSB9{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-KtSB9œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-uNcAU", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-uNcAUœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-KtSB9", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-KtSB9œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "id": "Memory-uy2TA", + "id": "Memory-VIq7F", "node": { "base_classes": [ "Data", @@ -296,7 +299,7 @@ }, "dragging": false, "height": 266, - "id": "Memory-uy2TA", + "id": "Memory-VIq7F", "position": { "x": 1264.7588980556088, "y": 506.6868269980502 @@ -313,7 +316,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "node": { "base_classes": [ "Message" @@ -446,7 +449,7 @@ }, "dragging": false, "height": 515, - "id": "Prompt-m9rUs", + "id": "Prompt-gEaWL", "position": { "x": 1880.8227904110583, "y": 625.8049209882275 @@ -461,7 +464,7 @@ }, { "data": { - "id": "ChatInput-hSTqh", + "id": "ChatInput-gIy9N", "node": { "base_classes": [ "Message" @@ -644,7 +647,7 @@ }, "dragging": false, "height": 308, - "id": "ChatInput-hSTqh", + "id": "ChatInput-gIy9N", "position": { "x": 1275.9262193671882, "y": 836.1228056896347 @@ -659,7 +662,10 @@ }, { "data": { - "id": "OpenAIModel-WmUtU", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-uNcAU", "node": { "base_classes": [ "LanguageModel", @@ -671,11 +677,12 @@ "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -732,7 +739,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -754,6 +761,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": 
false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -827,7 +849,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -835,7 +857,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -915,11 +937,11 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-WmUtU", + "id": "OpenAIModel-uNcAU", "position": { "x": 2428.0215346784357, "y": 569.9683144303319 @@ -934,7 +956,7 @@ }, { "data": { - "id": "ChatOutput-LIvGN", + "id": "ChatOutput-KtSB9", "node": { "base_classes": [ "Message" @@ -1094,7 +1116,7 @@ }, "dragging": false, "height": 308, - "id": "ChatOutput-LIvGN", + "id": "ChatOutput-KtSB9", "position": { "x": 2988.248820475989, "y": 705.837390387878 @@ -1116,8 +1138,8 @@ }, "description": "This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.", "endpoint_name": null, - "id": "2a47bc35-69ca-4d8b-9895-2a7fab222b9f", + "id": "16c029a0-0d89-4c36-8a8c-e5410206df38", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Memory Chatbot" } \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json index ae6115c67..7890fe164 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json @@ -2,10 +2,11 @@ "data": { "edges": [ { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": "ChatInput-c4xn9", + "id": "ChatInput-tuEeg", "name": "message", "output_types": [ "Message" @@ -13,50 +14,25 @@ }, "targetHandle": { "fieldName": "search_input", - "id": "AstraDB-7nAHJ", + "id": "AstraDB-xVF1f", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-ChatInput-c4xn9{œdataTypeœ:œChatInputœ,œidœ:œChatInput-c4xn9œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-AstraDB-7nAHJ{œfieldNameœ:œsearch_inputœ,œidœ:œAstraDB-7nAHJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "ChatInput-c4xn9", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-c4xn9œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "AstraDB-7nAHJ", - "targetHandle": "{œfieldNameœ: œsearch_inputœ, œidœ: œAstraDB-7nAHJœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-BKXc6", - "name": "embeddings", - "output_types": [ - "Embeddings" - ] - }, - "targetHandle": { - "fieldName": "embedding", - "id": "AstraDB-7nAHJ", - "inputTypes": [ - "Embeddings", - "dict" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIEmbeddings-BKXc6{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-BKXc6œ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-7nAHJ{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-7nAHJœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", - "source": "OpenAIEmbeddings-BKXc6", - "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-BKXc6œ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", - 
"target": "AstraDB-7nAHJ", - "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-7nAHJœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-ChatInput-tuEeg{œdataTypeœ:œChatInputœ,œidœ:œChatInput-tuEegœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-AstraDB-xVF1f{œfieldNameœ:œsearch_inputœ,œidœ:œAstraDB-xVF1fœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-tuEeg", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-tuEegœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "AstraDB-xVF1f", + "targetHandle": "{œfieldNameœ: œsearch_inputœ, œidœ: œAstraDB-xVF1fœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "AstraDB", - "id": "AstraDB-7nAHJ", + "id": "AstraDB-xVF1f", "name": "search_results", "output_types": [ "Data" @@ -64,24 +40,25 @@ }, "targetHandle": { "fieldName": "data", - "id": "ParseData-d61Q0", + "id": "ParseData-ZG3Aa", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-AstraDB-7nAHJ{œdataTypeœ:œAstraDBœ,œidœ:œAstraDB-7nAHJœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}-ParseData-d61Q0{œfieldNameœ:œdataœ,œidœ:œParseData-d61Q0œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "AstraDB-7nAHJ", - "sourceHandle": "{œdataTypeœ: œAstraDBœ, œidœ: œAstraDB-7nAHJœ, œnameœ: œsearch_resultsœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-d61Q0", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-d61Q0œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-AstraDB-xVF1f{œdataTypeœ:œAstraDBœ,œidœ:œAstraDB-xVF1fœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}-ParseData-ZG3Aa{œfieldNameœ:œdataœ,œidœ:œParseData-ZG3Aaœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "AstraDB-xVF1f", + "sourceHandle": "{œdataTypeœ: œAstraDBœ, œidœ: œAstraDB-xVF1fœ, œnameœ: œsearch_resultsœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-ZG3Aa", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-ZG3Aaœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-d61Q0", + "id": "ParseData-ZG3Aa", "name": "text", "output_types": [ "Message" @@ -89,7 +66,7 @@ }, "targetHandle": { "fieldName": "context", - "id": "Prompt-vqAlG", + "id": "Prompt-0Hp9v", "inputTypes": [ "Message", "Text" @@ -97,17 +74,18 @@ "type": "str" } }, - "id": "reactflow__edge-ParseData-d61Q0{œdataTypeœ:œParseDataœ,œidœ:œParseData-d61Q0œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-vqAlG{œfieldNameœ:œcontextœ,œidœ:œPrompt-vqAlGœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-d61Q0", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-d61Q0œ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-vqAlG", - "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-vqAlGœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-ZG3Aa{œdataTypeœ:œParseDataœ,œidœ:œParseData-ZG3Aaœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-0Hp9v{œfieldNameœ:œcontextœ,œidœ:œPrompt-0Hp9vœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-ZG3Aa", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-ZG3Aaœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-0Hp9v", + "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-0Hp9vœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "ChatInput", - "id": 
"ChatInput-c4xn9", + "id": "ChatInput-tuEeg", "name": "message", "output_types": [ "Message" @@ -115,7 +93,7 @@ }, "targetHandle": { "fieldName": "question", - "id": "Prompt-vqAlG", + "id": "Prompt-0Hp9v", "inputTypes": [ "Message", "Text" @@ -123,17 +101,18 @@ "type": "str" } }, - "id": "reactflow__edge-ChatInput-c4xn9{œdataTypeœ:œChatInputœ,œidœ:œChatInput-c4xn9œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-vqAlG{œfieldNameœ:œquestionœ,œidœ:œPrompt-vqAlGœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-c4xn9", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-c4xn9œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-vqAlG", - "targetHandle": "{œfieldNameœ: œquestionœ, œidœ: œPrompt-vqAlGœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-tuEeg{œdataTypeœ:œChatInputœ,œidœ:œChatInput-tuEegœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-0Hp9v{œfieldNameœ:œquestionœ,œidœ:œPrompt-0Hp9vœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-tuEeg", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-tuEegœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-0Hp9v", + "targetHandle": "{œfieldNameœ: œquestionœ, œidœ: œPrompt-0Hp9vœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-vqAlG", + "id": "Prompt-0Hp9v", "name": "prompt", "output_types": [ "Message" @@ -141,49 +120,25 @@ }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-ybL3k", + "id": "OpenAIModel-BQXFs", "inputTypes": [ "Message" ], "type": "str" } }, - "id": "reactflow__edge-Prompt-vqAlG{œdataTypeœ:œPromptœ,œidœ:œPrompt-vqAlGœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-ybL3k{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-ybL3kœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-vqAlG", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-vqAlGœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-ybL3k", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-ybL3kœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-ybL3k", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-BpzuD", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-OpenAIModel-ybL3k{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-ybL3kœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-BpzuD{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-BpzuDœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-ybL3k", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-ybL3kœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-BpzuD", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-BpzuDœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-0Hp9v{œdataTypeœ:œPromptœ,œidœ:œPrompt-0Hp9vœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-BQXFs{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-BQXFsœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-0Hp9v", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-0Hp9vœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-BQXFs", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: 
œOpenAIModel-BQXFsœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "File", - "id": "File-bf6wn", + "id": "File-BTJVJ", "name": "data", "output_types": [ "Data" @@ -191,24 +146,25 @@ }, "targetHandle": { "fieldName": "data_inputs", - "id": "SplitText-52wBo", + "id": "SplitText-RkdZ3", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-File-bf6wn{œdataTypeœ:œFileœ,œidœ:œFile-bf6wnœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-52wBo{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-52wBoœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "File-bf6wn", - "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-bf6wnœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "SplitText-52wBo", - "targetHandle": "{œfieldNameœ: œdata_inputsœ, œidœ: œSplitText-52wBoœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-File-BTJVJ{œdataTypeœ:œFileœ,œidœ:œFile-BTJVJœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-RkdZ3{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-RkdZ3œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "File-BTJVJ", + "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-BTJVJœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "SplitText-RkdZ3", + "targetHandle": "{œfieldNameœ: œdata_inputsœ, œidœ: œSplitText-RkdZ3œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "className": "", "data": { "sourceHandle": { "dataType": "SplitText", - "id": "SplitText-52wBo", + "id": "SplitText-RkdZ3", "name": "chunks", "output_types": [ "Data" @@ -216,24 +172,49 @@ }, "targetHandle": { "fieldName": "ingest_data", - "id": "AstraDB-vyd5U", + "id": "AstraDB-XXizY", "inputTypes": [ "Data" ], "type": "other" } }, - "id": "reactflow__edge-SplitText-52wBo{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-52wBoœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-AstraDB-vyd5U{œfieldNameœ:œingest_dataœ,œidœ:œAstraDB-vyd5Uœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "SplitText-52wBo", - "sourceHandle": "{œdataTypeœ: œSplitTextœ, œidœ: œSplitText-52wBoœ, œnameœ: œchunksœ, œoutput_typesœ: [œDataœ]}", - "target": "AstraDB-vyd5U", - "targetHandle": "{œfieldNameœ: œingest_dataœ, œidœ: œAstraDB-vyd5Uœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-SplitText-RkdZ3{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-RkdZ3œ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-AstraDB-XXizY{œfieldNameœ:œingest_dataœ,œidœ:œAstraDB-XXizYœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "SplitText-RkdZ3", + "sourceHandle": "{œdataTypeœ: œSplitTextœ, œidœ: œSplitText-RkdZ3œ, œnameœ: œchunksœ, œoutput_typesœ: [œDataœ]}", + "target": "AstraDB-XXizY", + "targetHandle": "{œfieldNameœ: œingest_dataœ, œidœ: œAstraDB-XXizYœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + }, + { + "data": { + "sourceHandle": { + "dataType": "OpenAIModelComponent", + "id": "OpenAIModel-BQXFs", + "name": "text_output", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-fDyGT", + "inputTypes": [ + "Message" + ], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-BQXFs{œdataTypeœ:œOpenAIModelComponentœ,œidœ:œOpenAIModel-BQXFsœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-fDyGT{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-fDyGTœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-BQXFs", + "sourceHandle": "{œdataTypeœ: œOpenAIModelComponentœ, œidœ: œOpenAIModel-BQXFsœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-fDyGT", + "targetHandle": 
"{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-fDyGTœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "data": { "sourceHandle": { "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-sRZMc", + "id": "OpenAIEmbeddings-fpOKp", "name": "embeddings", "output_types": [ "Embeddings" @@ -241,7 +222,7 @@ }, "targetHandle": { "fieldName": "embedding", - "id": "AstraDB-vyd5U", + "id": "AstraDB-XXizY", "inputTypes": [ "Embeddings", "dict" @@ -249,17 +230,43 @@ "type": "other" } }, - "id": "reactflow__edge-OpenAIEmbeddings-sRZMc{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-sRZMcœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-vyd5U{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-vyd5Uœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", - "source": "OpenAIEmbeddings-sRZMc", - "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-sRZMcœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", - "target": "AstraDB-vyd5U", - "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-vyd5Uœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-OpenAIEmbeddings-fpOKp{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-fpOKpœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-XXizY{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-XXizYœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", + "source": "OpenAIEmbeddings-fpOKp", + "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-fpOKpœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", + "target": "AstraDB-XXizY", + "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-XXizYœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + }, + { + "data": { + "sourceHandle": { + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-lCQlU", + "name": "embeddings", + "output_types": [ + "Embeddings" + ] + }, + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDB-xVF1f", + "inputTypes": [ + "Embeddings", + "dict" + ], + "type": "other" + } + }, + "id": "reactflow__edge-OpenAIEmbeddings-lCQlU{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-lCQlUœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-xVF1f{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-xVF1fœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", + "source": "OpenAIEmbeddings-lCQlU", + "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-lCQlUœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", + "target": "AstraDB-xVF1f", + "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-xVF1fœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" } ], "nodes": [ { "data": { - "id": "ChatInput-c4xn9", + "id": "ChatInput-tuEeg", "node": { "base_classes": [ "Message" @@ -442,7 +449,7 @@ }, "dragging": false, "height": 308, - "id": "ChatInput-c4xn9", + "id": "ChatInput-tuEeg", "position": { "x": 642.3545710150049, "y": 220.22556606238678 @@ -457,7 +464,7 @@ }, { "data": { - "id": "AstraDB-7nAHJ", + "id": "AstraDB-xVF1f", "node": { "base_classes": [ "Data", @@ -863,7 +870,7 @@ }, "dragging": false, "height": 753, - "id": "AstraDB-7nAHJ", + "id": "AstraDB-xVF1f", "position": { "x": 1246.0381406498648, "y": 333.25157075413966 @@ -878,437 +885,7 @@ }, { "data": { - "id": "OpenAIEmbeddings-BKXc6", - "node": { - "base_classes": [ - "Embeddings" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generate embeddings using OpenAI models.", - "display_name": "OpenAI Embeddings", - "documentation": "", - "edited": false, - "field_order": [ - 
"default_headers", - "default_query", - "chunk_size", - "client", - "deployment", - "embedding_ctx_length", - "max_retries", - "model", - "model_kwargs", - "openai_api_base", - "openai_api_key", - "openai_api_type", - "openai_api_version", - "openai_organization", - "openai_proxy", - "request_timeout", - "show_progress_bar", - "skip_empty", - "tiktoken_model_name", - "tiktoken_enable" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Embeddings", - "hidden": false, - "method": "build_embeddings", - "name": "embeddings", - "selected": "Embeddings", - "types": [ - "Embeddings" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "chunk_size": { - "advanced": true, - "display_name": "Chunk Size", - "dynamic": false, - "info": "", - "list": false, - "name": "chunk_size", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1000 - }, - "client": { - "advanced": true, - "display_name": "Client", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "client", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n 
MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" - }, - "default_headers": { - "advanced": true, - "display_name": "Default Headers", - "dynamic": false, - "info": "Default headers to use for the API request.", - "list": false, - "name": "default_headers", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "default_query": { - "advanced": true, - "display_name": "Default Query", - "dynamic": false, - "info": "Default query parameters to use for the API request.", - "list": false, - "name": "default_query", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "deployment": { - "advanced": true, - "display_name": "Deployment", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "deployment", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "embedding_ctx_length": { - "advanced": true, - "display_name": "Embedding Context Length", - "dynamic": false, - "info": "", - "list": false, - "name": "embedding_ctx_length", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1536 - }, - "max_retries": { - "advanced": true, - "display_name": "Max Retries", - "dynamic": false, - "info": "", - "list": false, - "name": "max_retries", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 3 - }, - "model": { - "advanced": false, - "display_name": "Model", - "dynamic": false, - "info": "", - "name": "model", - "options": [ - "text-embedding-3-small", - 
"text-embedding-3-large", - "text-embedding-ada-002" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "text-embedding-3-small" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "", - "input_types": [], - "load_from_db": true, - "name": "openai_api_base", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "openai_api_key": { - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "", - "input_types": [], - "load_from_db": false, - "name": "openai_api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "openai_api_type": { - "advanced": true, - "display_name": "OpenAI API Type", - "dynamic": false, - "info": "", - "input_types": [], - "load_from_db": true, - "name": "openai_api_type", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "openai_api_version": { - "advanced": true, - "display_name": "OpenAI API Version", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "openai_api_version", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "openai_organization": { - "advanced": true, - "display_name": "OpenAI Organization", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "openai_organization", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "openai_proxy": { - "advanced": true, - "display_name": "OpenAI Proxy", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "openai_proxy", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "request_timeout": { - "advanced": true, - "display_name": "Request Timeout", - "dynamic": false, - "info": "", - "list": false, - "name": "request_timeout", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": "" - }, - "show_progress_bar": { - "advanced": true, - "display_name": "Show Progress Bar", - "dynamic": false, - "info": "", - "list": false, - "name": "show_progress_bar", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "skip_empty": { - "advanced": true, - "display_name": "Skip Empty", - "dynamic": false, - "info": "", - "list": false, - "name": "skip_empty", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - 
"trace_as_metadata": true, - "type": "bool", - "value": false - }, - "tiktoken_enable": { - "advanced": true, - "display_name": "TikToken Enable", - "dynamic": false, - "info": "If False, you must have transformers installed.", - "list": false, - "name": "tiktoken_enable", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "tiktoken_model_name": { - "advanced": true, - "display_name": "TikToken Model Name", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "tiktoken_model_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - } - } - }, - "type": "OpenAIEmbeddings" - }, - "dragging": false, - "height": 394, - "id": "OpenAIEmbeddings-BKXc6", - "position": { - "x": 603.2488770584523, - "y": 661.6162066128852 - }, - "positionAbsolute": { - "x": 603.2488770584523, - "y": 661.6162066128852 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "ParseData-d61Q0", + "id": "ParseData-ZG3Aa", "node": { "base_classes": [ "Message" @@ -1425,7 +1002,7 @@ }, "dragging": false, "height": 384, - "id": "ParseData-d61Q0", + "id": "ParseData-ZG3Aa", "position": { "x": 1854.1518317915907, "y": 459.3386924128532 @@ -1442,7 +1019,7 @@ "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-vqAlG", + "id": "Prompt-0Hp9v", "node": { "base_classes": [ "Message" @@ -1575,7 +1152,7 @@ }, "dragging": false, "height": 515, - "id": "Prompt-vqAlG", + "id": "Prompt-0Hp9v", "position": { "x": 2486.0988668404975, "y": 496.5120474157301 @@ -1590,7 +1167,10 @@ }, { "data": { - "id": "OpenAIModel-ybL3k", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "edited": false, + "id": "OpenAIModel-BQXFs", "node": { "base_classes": [ "LanguageModel", @@ -1602,11 +1182,12 @@ "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", "documentation": "", - "edited": false, + "edited": true, "field_order": [ "input_value", "max_tokens", "model_kwargs", + "json_mode", "output_schema", "model_name", "openai_api_base", @@ -1663,7 +1244,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict)\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n inputs = [\n MessageInput(name=\"input_value\", 
display_name=\"Input\"),\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"openai_api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n BoolInput(name=\"stream\", display_name=\"Stream\", info=STREAM_INFO_TEXT, advanced=True),\n StrInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"System message to pass to the model.\",\n advanced=True,\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n # self.output_schea is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.openai_api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n model_kwargs[\"seed\"] = seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -1685,6 +1266,21 @@ "type": "str", "value": "" }, + "json_mode": { + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, 
+ "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, "max_tokens": { "advanced": true, "display_name": "Max Tokens", @@ -1758,7 +1354,7 @@ "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -1766,7 +1362,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "output_schema": { "advanced": true, @@ -1846,11 +1442,11 @@ } } }, - "type": "OpenAIModel" + "type": "OpenAIModelComponent" }, "dragging": false, "height": 621, - "id": "OpenAIModel-ybL3k", + "id": "OpenAIModel-BQXFs", "position": { "x": 3145.6693008609222, "y": 374.23955005474204 @@ -1865,7 +1461,7 @@ }, { "data": { - "id": "ChatOutput-BpzuD", + "id": "ChatOutput-fDyGT", "node": { "base_classes": [ "Message" @@ -2025,7 +1621,7 @@ }, "dragging": false, "height": 308, - "id": "ChatOutput-BpzuD", + "id": "ChatOutput-fDyGT", "position": { "x": 3769.242086248817, "y": 585.3403837062634 @@ -2040,7 +1636,7 @@ }, { "data": { - "id": "SplitText-52wBo", + "id": "SplitText-RkdZ3", "node": { "base_classes": [ "Data" @@ -2170,7 +1766,7 @@ }, "dragging": false, "height": 527, - "id": "SplitText-52wBo", + "id": "SplitText-RkdZ3", "position": { "x": 2044.2799160989089, "y": 1185.3130355818519 @@ -2185,7 +1781,7 @@ }, { "data": { - "id": "File-bf6wn", + "id": "File-BTJVJ", "node": { "base_classes": [ "Data" @@ -2296,7 +1892,7 @@ }, "dragging": false, "height": 300, - "id": "File-bf6wn", + "id": "File-BTJVJ", "position": { "x": 1418.981990122179, "y": 1539.3825691184466 @@ -2311,7 +1907,7 @@ }, { "data": { - "id": "AstraDB-vyd5U", + "id": "AstraDB-XXizY", "node": { "base_classes": [ "Data", @@ -2716,7 +2312,7 @@ }, "dragging": false, "height": 753, - "id": "AstraDB-vyd5U", + "id": "AstraDB-XXizY", "position": { "x": 2676.4816074350247, "y": 1269.304067004569 @@ -2731,7 +2327,7 @@ }, { "data": { - "id": "OpenAIEmbeddings-sRZMc", + "id": "OpenAIEmbeddings-fpOKp", "node": { "base_classes": [ "Embeddings" @@ -2836,7 +2432,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n 
name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" + "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n 
display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" }, "default_headers": { "advanced": true, @@ -2958,7 +2554,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": true, + "load_from_db": false, "name": "openai_api_base", "password": true, "placeholder": "", @@ -2974,7 +2570,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": false, + "load_from_db": true, "name": "openai_api_key", "password": true, "placeholder": "", @@ -2982,7 +2578,7 @@ "show": true, "title_case": false, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, "openai_api_type": { "advanced": true, @@ -2990,7 +2586,7 @@ "dynamic": false, "info": "", "input_types": [], - "load_from_db": true, + "load_from_db": false, "name": "openai_api_type", "password": true, "placeholder": "", @@ -3146,14 +2742,444 @@ }, "dragging": false, "height": 394, - "id": "OpenAIEmbeddings-sRZMc", + "id": "OpenAIEmbeddings-fpOKp", "position": { - "x": 2050.0569098721217, - "y": 1823.5240486490072 + "x": 2044.683126356786, + "y": 1785.2283494456522 }, "positionAbsolute": { - "x": 2050.0569098721217, - "y": 1823.5240486490072 + "x": 2044.683126356786, + "y": 1785.2283494456522 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "id": "OpenAIEmbeddings-lCQlU", + "node": { + "base_classes": [ + "Embeddings" + ], + "beta": 
false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generate embeddings using OpenAI models.", + "display_name": "OpenAI Embeddings", + "documentation": "", + "edited": false, + "field_order": [ + "default_headers", + "default_query", + "chunk_size", + "client", + "deployment", + "embedding_ctx_length", + "max_retries", + "model", + "model_kwargs", + "openai_api_base", + "openai_api_key", + "openai_api_type", + "openai_api_version", + "openai_organization", + "openai_proxy", + "request_timeout", + "show_progress_bar", + "skip_empty", + "tiktoken_model_name", + "tiktoken_enable" + ], + "frozen": false, + "icon": "OpenAI", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Embeddings", + "hidden": false, + "method": "build_embeddings", + "name": "embeddings", + "selected": "Embeddings", + "types": [ + "Embeddings" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "chunk_size": { + "advanced": true, + "display_name": "Chunk Size", + "dynamic": false, + "info": "", + "list": false, + "name": "chunk_size", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1000 + }, + "client": { + "advanced": true, + "display_name": "Client", + "dynamic": false, + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "client", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=[\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", 
display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n )\n" + }, + "default_headers": { + "advanced": true, + "display_name": "Default Headers", + "dynamic": false, + "info": "Default headers to use for the API request.", + "list": false, + "name": "default_headers", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "default_query": { + "advanced": true, + "display_name": "Default Query", + "dynamic": false, + "info": "Default query parameters to use for the API request.", + "list": false, + "name": "default_query", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "deployment": { + "advanced": true, + "display_name": "Deployment", + "dynamic": false, + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "deployment", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "embedding_ctx_length": { + "advanced": true, + "display_name": "Embedding Context Length", + "dynamic": false, + "info": "", + "list": false, + "name": "embedding_ctx_length", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1536 + }, + "max_retries": { + "advanced": true, + "display_name": "Max Retries", + "dynamic": false, + "info": "", + "list": false, + "name": "max_retries", + "placeholder": "", + "required": false, + "show": true, + 
"title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 3 + }, + "model": { + "advanced": false, + "display_name": "Model", + "dynamic": false, + "info": "", + "name": "model", + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "text-embedding-3-small" + }, + "model_kwargs": { + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "openai_api_base": { + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "", + "input_types": [], + "load_from_db": false, + "name": "openai_api_base", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "openai_api_key": { + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "", + "input_types": [], + "load_from_db": true, + "name": "openai_api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "openai_api_type": { + "advanced": true, + "display_name": "OpenAI API Type", + "dynamic": false, + "info": "", + "input_types": [], + "load_from_db": false, + "name": "openai_api_type", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "openai_api_version": { + "advanced": true, + "display_name": "OpenAI API Version", + "dynamic": false, + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "openai_api_version", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "openai_organization": { + "advanced": true, + "display_name": "OpenAI Organization", + "dynamic": false, + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "openai_organization", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "openai_proxy": { + "advanced": true, + "display_name": "OpenAI Proxy", + "dynamic": false, + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "openai_proxy", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "request_timeout": { + "advanced": true, + "display_name": "Request Timeout", + "dynamic": false, + "info": "", + "list": false, + "name": "request_timeout", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": "" + }, + "show_progress_bar": { + "advanced": true, + "display_name": "Show Progress Bar", + "dynamic": false, + "info": "", + "list": false, + "name": "show_progress_bar", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + 
"value": false + }, + "skip_empty": { + "advanced": true, + "display_name": "Skip Empty", + "dynamic": false, + "info": "", + "list": false, + "name": "skip_empty", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "tiktoken_enable": { + "advanced": true, + "display_name": "TikToken Enable", + "dynamic": false, + "info": "If False, you must have transformers installed.", + "list": false, + "name": "tiktoken_enable", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "tiktoken_model_name": { + "advanced": true, + "display_name": "TikToken Model Name", + "dynamic": false, + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "tiktoken_model_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "OpenAIEmbeddings" + }, + "dragging": false, + "height": 394, + "id": "OpenAIEmbeddings-lCQlU", + "position": { + "x": 628.9252513328779, + "y": 648.6750537749285 + }, + "positionAbsolute": { + "x": 628.9252513328779, + "y": 648.6750537749285 }, "selected": false, "type": "genericNode", @@ -3161,15 +3187,15 @@ } ], "viewport": { - "x": -108.04801490857153, - "y": -44.38043074355511, - "zoom": 0.32281188532359256 + "x": -110.08684771034166, + "y": -46.27017080984389, + "zoom": 0.3228119071747796 } }, "description": "Visit https://docs.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. 
\n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.", "endpoint_name": null, - "id": "7804e4a4-8e16-45e0-88ab-ed6248daa0eb", + "id": "f1a53ec2-49e2-4029-b8a8-1a73079f9653", "is_component": false, - "last_tested_version": "1.0.0rc1", + "last_tested_version": "1.0.5", "name": "Vector Store RAG" } \ No newline at end of file diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index cb65a71c1..10e5aa3e4 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -174,9 +174,10 @@ export default function ParameterComponent({ const handleOnNewValue = async ( newValue: string | string[] | boolean | Object[], + dbValue?: boolean, skipSnapshot: boolean | undefined = false, ): Promise => { - handleOnNewValueHook(newValue, skipSnapshot); + handleOnNewValueHook(newValue, dbValue, skipSnapshot); }; const handleNodeClass = (newNodeClass: APIClassType, code?: string): void => { @@ -470,16 +471,6 @@ export default function ParameterComponent({ { - setNode(data.id, (oldNode) => { - let newNode = cloneDeep(oldNode); - newNode.data = { - ...newNode.data, - }; - newNode.data.node.template[name].load_from_db = value; - return newNode; - }); - }} name={name} data={data.node?.template[name]!} /> diff --git a/src/frontend/src/CustomNodes/hooks/use-handle-new-value.tsx b/src/frontend/src/CustomNodes/hooks/use-handle-new-value.tsx index a32c4d2dc..25ef54825 100644 --- a/src/frontend/src/CustomNodes/hooks/use-handle-new-value.tsx +++ b/src/frontend/src/CustomNodes/hooks/use-handle-new-value.tsx @@ -18,7 +18,7 @@ const useHandleOnNewValue = ( ) => { const setErrorData = useAlertStore((state) => state.setErrorData); - const handleOnNewValue = async (newValue, skipSnapshot = false) => { + const handleOnNewValue = async (newValue, dbValue, skipSnapshot = false) => { const nodeTemplate = data.node!.template[name]; const currentValue = nodeTemplate.value; @@ -63,6 +63,10 @@ const useHandleOnNewValue = ( ...newNode.data, }; + if (dbValue) { + newNode.data.node.template[name].load_from_db = dbValue; + } + if (data.node?.template[name].real_time_refresh && newTemplate) { newNode.data.node.template = newTemplate; } else { diff --git a/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx b/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx index 415cd92d7..4dcda2deb 100644 --- a/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx +++ b/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx @@ -29,7 +29,9 @@ export default function AddNewVariableButton({ const setErrorData = useAlertStore((state) => state.setErrorData); const componentFields = useTypesStore((state) => state.ComponentFields); const unavaliableFields = new Set( - Object.keys(useGlobalVariablesStore((state) => state.unavaliableFields)), + Object.keys( + useGlobalVariablesStore((state) => state.unavaliableFields) ?? 
{}, + ), ); const availableFields = () => { diff --git a/src/frontend/src/components/codeAreaComponent/index.tsx b/src/frontend/src/components/codeAreaComponent/index.tsx index 00721137f..ad8cfaf6f 100644 --- a/src/frontend/src/components/codeAreaComponent/index.tsx +++ b/src/frontend/src/components/codeAreaComponent/index.tsx @@ -23,7 +23,7 @@ export default function CodeAreaComponent({ useEffect(() => { if (disabled && myValue !== "") { setMyValue(""); - onChange("", true); + onChange("", undefined, true); } }, [disabled]); diff --git a/src/frontend/src/components/floatComponent/index.tsx b/src/frontend/src/components/floatComponent/index.tsx index 8db5b823a..07de1fbb2 100644 --- a/src/frontend/src/components/floatComponent/index.tsx +++ b/src/frontend/src/components/floatComponent/index.tsx @@ -16,7 +16,7 @@ export default function FloatComponent({ // Clear component state useEffect(() => { if (disabled && value !== "") { - onChange("", true); + onChange("", undefined, true); } }, [disabled]); diff --git a/src/frontend/src/components/inputFileComponent/index.tsx b/src/frontend/src/components/inputFileComponent/index.tsx index 9710f7f61..f8dd9725f 100644 --- a/src/frontend/src/components/inputFileComponent/index.tsx +++ b/src/frontend/src/components/inputFileComponent/index.tsx @@ -27,7 +27,7 @@ export default function InputFileComponent({ useEffect(() => { if (disabled && value !== "") { setMyValue(""); - onChange("", true); + onChange("", undefined, true); onFileChange(""); } }, [disabled, onChange]); diff --git a/src/frontend/src/components/inputGlobalComponent/index.tsx b/src/frontend/src/components/inputGlobalComponent/index.tsx index 4d20e8132..1d3cf4f60 100644 --- a/src/frontend/src/components/inputGlobalComponent/index.tsx +++ b/src/frontend/src/components/inputGlobalComponent/index.tsx @@ -1,4 +1,5 @@ import { useEffect } from "react"; +import { Controller } from "react-hook-form"; import { deleteGlobalVariable } from "../../controllers/API"; import DeleteConfirmationModal from "../../modals/deleteConfirmationModal"; import useAlertStore from "../../stores/alertStore"; @@ -13,7 +14,6 @@ import { CommandItem } from "../ui/command"; export default function InputGlobalComponent({ disabled, onChange, - setDb, name, data, editNode = false, @@ -23,39 +23,17 @@ export default function InputGlobalComponent({ ); const getVariableId = useGlobalVariablesStore((state) => state.getVariableId); - const unavaliableFields = useGlobalVariablesStore( - (state) => state.unavaliableFields, - ); const removeGlobalVariable = useGlobalVariablesStore( (state) => state.removeGlobalVariable, ); const setErrorData = useAlertStore((state) => state.setErrorData); useEffect(() => { - if (data) - if ( - ((globalVariablesEntries && - !globalVariablesEntries.includes(data.value)) || - !globalVariablesEntries) && - data.load_from_db - ) { - setTimeout(() => { - onChange("", true); - setDb(false); - }, 100); + if (data && globalVariablesEntries) + if (data.load_from_db && !globalVariablesEntries.includes(data.value)) { + onChange("", false, true); } - }, [globalVariablesEntries, data]); - - useEffect(() => { - if (!data.value && data.display_name) { - if (unavaliableFields[data.display_name!] 
&& !disabled) { - setTimeout(() => { - setDb(true); - onChange(unavaliableFields[data.display_name!]); - }, 100); - } - } - }, [unavaliableFields]); + }, [globalVariablesEntries]); async function handleDelete(key: string) { const id = getVariableId(key); @@ -64,8 +42,7 @@ export default function InputGlobalComponent({ .then(() => { removeGlobalVariable(key); if (data?.value === key && data?.load_from_db) { - onChange(""); - setDb(false); + onChange("", false); } }) .catch(() => { @@ -137,12 +114,10 @@ export default function InputGlobalComponent({ : "" } setSelectedOption={(value) => { - onChange(value); - setDb(value !== "" ? true : false); + onChange(value, value !== "" ? true : false); }} onChange={(value, skipSnapshot) => { - onChange(value, skipSnapshot); - setDb(false); + onChange(value, false, skipSnapshot); }} /> ); diff --git a/src/frontend/src/components/intComponent/index.tsx b/src/frontend/src/components/intComponent/index.tsx index f0d67a402..7621d6732 100644 --- a/src/frontend/src/components/intComponent/index.tsx +++ b/src/frontend/src/components/intComponent/index.tsx @@ -19,7 +19,7 @@ export default function IntComponent({ // Clear component state useEffect(() => { if (disabled && value !== "") { - onChange("", true); + onChange("", undefined, true); } }, [disabled, onChange]); diff --git a/src/frontend/src/components/promptComponent/index.tsx b/src/frontend/src/components/promptComponent/index.tsx index 0dcec3dbf..c2e721baf 100644 --- a/src/frontend/src/components/promptComponent/index.tsx +++ b/src/frontend/src/components/promptComponent/index.tsx @@ -19,7 +19,7 @@ export default function PromptAreaComponent({ }: PromptAreaComponentType): JSX.Element { useEffect(() => { if (disabled && value !== "") { - onChange("", true); + onChange("", undefined, true); } }, [disabled]); diff --git a/src/frontend/src/components/tableComponent/components/tableNodeCellRender/index.tsx b/src/frontend/src/components/tableComponent/components/tableNodeCellRender/index.tsx index d0c3d4e53..1495ff98f 100644 --- a/src/frontend/src/components/tableComponent/components/tableNodeCellRender/index.tsx +++ b/src/frontend/src/components/tableComponent/components/tableNodeCellRender/index.tsx @@ -24,32 +24,21 @@ import ToggleShadComponent from "../../../toggleShadComponent"; export default function TableNodeCellRender({ node: { data }, - value: { - value, - nodeClass, - handleOnNewValue: handleOnNewValueNode, - handleOnChangeDb: handleOnChangeDbNode, - }, + value: { value, nodeClass, handleOnNewValue: handleOnNewValueNode }, }: CustomCellRendererProps) { - const handleOnNewValue = (newValue: any, name: string) => { - handleOnNewValueNode(newValue, name); + const handleOnNewValue = (newValue: any, name: string, dbValue?: boolean) => { + handleOnNewValueNode(newValue, name, dbValue); setTemplateData((old) => { let newData = cloneDeep(old); newData.value = newValue; + if (dbValue) { + newData.load_from_db = newValue; + } return newData; }); setTemplateValue(newValue); }; - const handleOnChangeDb = (newValue: boolean, name: string) => { - handleOnChangeDbNode(newValue, name); - setTemplateData((old) => { - let newData = cloneDeep(old); - newData.load_from_db = newValue; - return newData; - }); - }; - const [templateValue, setTemplateValue] = useState(value); const [templateData, setTemplateData] = useState(data); @@ -106,10 +95,9 @@ export default function TableNodeCellRender({ handleOnNewValue(value, templateData.key)} - setDb={(value) => { - handleOnChangeDb(value, templateData.key); - }} + 
onChange={(value, dbValue, snapshot) => + handleOnNewValue(value, templateData.key, dbValue) + } name={templateData.key} data={templateData} /> diff --git a/src/frontend/src/components/textAreaComponent/index.tsx b/src/frontend/src/components/textAreaComponent/index.tsx index 18a7f5b40..253e3eff9 100644 --- a/src/frontend/src/components/textAreaComponent/index.tsx +++ b/src/frontend/src/components/textAreaComponent/index.tsx @@ -18,7 +18,7 @@ export default function TextAreaComponent({ // Clear text area useEffect(() => { if (disabled && value !== "") { - onChange("", true); + onChange("", undefined, true); } }, [disabled]); diff --git a/src/frontend/src/modals/editNodeModal/hooks/use-column-defs.tsx b/src/frontend/src/modals/editNodeModal/hooks/use-column-defs.tsx index 71f8566de..cad877aa6 100644 --- a/src/frontend/src/modals/editNodeModal/hooks/use-column-defs.tsx +++ b/src/frontend/src/modals/editNodeModal/hooks/use-column-defs.tsx @@ -6,8 +6,7 @@ import { NodeDataType } from "../../../types/flow"; const useColumnDefs = ( myData: NodeDataType, - handleOnNewValue: (newValue: any, name: string) => void, - handleOnChangeDb: (value: boolean, key: string) => void, + handleOnNewValue: (newValue: any, name: string, setDb?: boolean) => void, changeAdvanced: (n: string) => void, open: boolean, ) => { @@ -49,7 +48,6 @@ const useColumnDefs = ( value: params.data.value, nodeClass: myData.node, handleOnNewValue: handleOnNewValue, - handleOnChangeDb: handleOnChangeDb, }; }, minWidth: 340, diff --git a/src/frontend/src/modals/editNodeModal/index.tsx b/src/frontend/src/modals/editNodeModal/index.tsx index 991af0d73..e267b6bac 100644 --- a/src/frontend/src/modals/editNodeModal/index.tsx +++ b/src/frontend/src/modals/editNodeModal/index.tsx @@ -36,12 +36,11 @@ const EditNodeModal = forwardRef( !myData.current.node!.template[n]?.advanced; } - const handleOnNewValue = (newValue: any, key: string) => { + const handleOnNewValue = (newValue: any, key: string, setDb?: boolean) => { myData.current.node!.template[key].value = newValue; - }; - - const handleOnChangeDb = (newValue: boolean, key: string) => { - myData.current.node!.template[key].load_from_db = newValue; + if (setDb) { + myData.current.node!.template[key].load_from_db = newValue; + } }; const rowData = useRowData(data, open); @@ -49,7 +48,6 @@ const EditNodeModal = forwardRef( const columnDefs: ColDef[] = useColumnDefs( data, handleOnNewValue, - handleOnChangeDb, changeAdvanced, open, ); diff --git a/src/frontend/src/modals/newFlowModal/index.tsx b/src/frontend/src/modals/newFlowModal/index.tsx index 37a6afeeb..295ecd395 100644 --- a/src/frontend/src/modals/newFlowModal/index.tsx +++ b/src/frontend/src/modals/newFlowModal/index.tsx @@ -10,12 +10,6 @@ export default function NewFlowModal({ }: newFlowModalPropsType): JSX.Element { const examples = useFlowsManagerStore((state) => state.examples); - examples?.forEach((example) => { - if (example.name === "Blog Writter") { - example.name = "Blog Writer"; - } - }); - return ( diff --git a/src/frontend/src/stores/flowStore.ts b/src/frontend/src/stores/flowStore.ts index df12c944c..0b67dd88f 100644 --- a/src/frontend/src/stores/flowStore.ts +++ b/src/frontend/src/stores/flowStore.ts @@ -40,6 +40,7 @@ import { getInputsAndOutputs } from "../utils/storeUtils"; import useAlertStore from "./alertStore"; import { useDarkStore } from "./darkStore"; import useFlowsManagerStore from "./flowsManagerStore"; +import { useGlobalVariablesStore } from "./globalVariablesStore/globalVariables"; // this is our useStore 
hook that we can use in our components to get parts of the store and call actions const useFlowStore = create((set, get) => ({ @@ -288,7 +289,12 @@ const useFlowStore = create((set, get) => ({ id: newId, }, }; - updateGroupRecursion(newNode, selection.edges); + updateGroupRecursion( + newNode, + selection.edges, + useGlobalVariablesStore.getState().unavaliableFields, + useGlobalVariablesStore.getState().globalVariablesEntries, + ); // Add the new node to the list of nodes in state newNodes = newNodes diff --git a/src/frontend/src/stores/flowsManagerStore.ts b/src/frontend/src/stores/flowsManagerStore.ts index a804710ac..a99916107 100644 --- a/src/frontend/src/stores/flowsManagerStore.ts +++ b/src/frontend/src/stores/flowsManagerStore.ts @@ -1,3 +1,4 @@ +import { AxiosError } from "axios"; import { cloneDeep } from "lodash"; import pDebounce from "p-debounce"; import { Edge, Node, Viewport, XYPosition } from "reactflow"; @@ -23,11 +24,13 @@ import { extractFieldsFromComponenents, processDataFromFlow, processFlows, + updateGroupRecursion, } from "../utils/reactflowUtils"; import useAlertStore from "./alertStore"; import { useDarkStore } from "./darkStore"; import useFlowStore from "./flowStore"; import { useFolderStore } from "./foldersStore"; +import { useGlobalVariablesStore } from "./globalVariablesStore/globalVariables"; import { useTypesStore } from "./typesStore"; let saveTimeoutId: NodeJS.Timeout | null = null; @@ -202,6 +205,14 @@ const useFlowsManagerStore = create((set, get) => ({ let flowData = flow ? processDataFromFlow(flow) : { nodes: [], edges: [], viewport: { zoom: 1, x: 0, y: 0 } }; + flowData?.nodes.forEach((node) => { + updateGroupRecursion( + node, + flowData?.edges, + useGlobalVariablesStore.getState().unavaliableFields, + useGlobalVariablesStore.getState().globalVariablesEntries, + ); + }); if (newProject) { // Create a new flow with a default name if no flow is provided. 
const folder_id = useFolderStore.getState().folderUrl; diff --git a/src/frontend/src/stores/globalVariablesStore/globalVariables.ts b/src/frontend/src/stores/globalVariablesStore/globalVariables.ts index 708f7ec09..4f80559f5 100644 --- a/src/frontend/src/stores/globalVariablesStore/globalVariables.ts +++ b/src/frontend/src/stores/globalVariablesStore/globalVariables.ts @@ -4,12 +4,12 @@ import getUnavailableFields from "./utils/get-unavailable-fields"; export const useGlobalVariablesStore = create( (set, get) => ({ - unavaliableFields: {}, + unavaliableFields: undefined, setUnavaliableFields: (fields) => { set({ unavaliableFields: fields }); }, removeUnavaliableField: (field) => { - const newFields = get().unavaliableFields; + const newFields = get().unavaliableFields || {}; delete newFields[field]; set({ unavaliableFields: newFields }); }, @@ -18,7 +18,7 @@ export const useGlobalVariablesStore = create( setGlobalVariables: (variables) => { set({ globalVariables: variables, - globalVariablesEntries: Object.keys(variables), + globalVariablesEntries: Object.keys(variables) || [], unavaliableFields: getUnavailableFields(variables), }); }, @@ -27,7 +27,7 @@ export const useGlobalVariablesStore = create( const newVariables = { ...get().globalVariables, [name]: data }; set({ globalVariables: newVariables, - globalVariablesEntries: Object.keys(newVariables), + globalVariablesEntries: Object.keys(newVariables) || [], unavaliableFields: getUnavailableFields(newVariables), }); }, @@ -38,7 +38,7 @@ export const useGlobalVariablesStore = create( delete newVariables[name]; set({ globalVariables: newVariables, - globalVariablesEntries: Object.keys(newVariables), + globalVariablesEntries: Object.keys(newVariables) || [], unavaliableFields: getUnavailableFields(newVariables), }); }, diff --git a/src/frontend/src/types/components/index.ts b/src/frontend/src/types/components/index.ts index 09324920f..7c5c60579 100644 --- a/src/frontend/src/types/components/index.ts +++ b/src/frontend/src/types/components/index.ts @@ -91,8 +91,7 @@ export type InputListComponentType = { export type InputGlobalComponentType = { disabled: boolean; - onChange: (value: string, snapshot?: boolean) => void; - setDb: (value: boolean) => void; + onChange: (value: string, dbValue: boolean, snapshot?: boolean) => void; name: string; data: InputFieldType; editNode?: boolean; @@ -124,7 +123,11 @@ export type TextAreaComponentType = { nodeClass?: APIClassType; setNodeClass?: (value: APIClassType) => void; disabled: boolean; - onChange: (value: string[] | string, skipSnapshot?: boolean) => void; + onChange: ( + value: string[] | string, + dbValue?: boolean, + skipSnapshot?: boolean, + ) => void; value: string; editNode?: boolean; id?: string; @@ -146,7 +149,11 @@ export type PromptAreaComponentType = { nodeClass?: APIClassType; setNodeClass?: (value: APIClassType, code?: string) => void; disabled: boolean; - onChange: (value: string[] | string, skipSnapshot?: boolean) => void; + onChange: ( + value: string[] | string, + dbValue?: boolean, + skipSnapshot?: boolean, + ) => void; value: string; readonly?: boolean; editNode?: boolean; @@ -156,7 +163,11 @@ export type PromptAreaComponentType = { export type CodeAreaComponentType = { setOpenModal?: (bool: boolean) => void; disabled: boolean; - onChange: (value: string[] | string, skipSnapshot?: boolean) => void; + onChange: ( + value: string[] | string, + dbValue?: boolean, + skipSnapshot?: boolean, + ) => void; value: string; editNode?: boolean; nodeClass?: APIClassType; @@ -171,7 +182,11 @@ 
export type CodeAreaComponentType = { export type FileComponentType = { IOInputProps?; disabled: boolean; - onChange: (value: string[] | string, skipSnapshot?: boolean) => void; + onChange: ( + value: string[] | string, + dbValue?: boolean, + skipSnapshot?: boolean, + ) => void; value: string; fileTypes: Array; onFileChange: (value: string) => void; @@ -204,7 +219,7 @@ export type IntComponentType = { value: string; disabled?: boolean; rangeSpec: RangeSpecType; - onChange: (value: string, skipSnapshot?: boolean) => void; + onChange: (value: string, dbValue?: boolean, skipSnapshot?: boolean) => void; editNode?: boolean; id?: string; }; @@ -212,7 +227,7 @@ export type IntComponentType = { export type FloatComponentType = { value: string; disabled?: boolean; - onChange: (value: string, skipSnapshot?: boolean) => void; + onChange: (value: string, dbValue?: boolean, skipSnapshot?: boolean) => void; rangeSpec: RangeSpecType; editNode?: boolean; id?: string; diff --git a/src/frontend/src/types/zustand/globalVariables/index.ts b/src/frontend/src/types/zustand/globalVariables/index.ts index 4b178088c..e4749ee8f 100644 --- a/src/frontend/src/types/zustand/globalVariables/index.ts +++ b/src/frontend/src/types/zustand/globalVariables/index.ts @@ -25,7 +25,7 @@ export type GlobalVariablesStore = { ) => void; removeGlobalVariable: (name: string) => Promise; getVariableId: (name: string) => string | undefined; - unavaliableFields: { [name: string]: string }; + unavaliableFields: { [name: string]: string } | undefined; setUnavaliableFields: (fields: { [name: string]: string }) => void; removeUnavaliableField: (field: string) => void; }; diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts index d16a7e851..e4ac017a8 100644 --- a/src/frontend/src/utils/reactflowUtils.ts +++ b/src/frontend/src/utils/reactflowUtils.ts @@ -1487,11 +1487,30 @@ export function isOutputType(type: string): boolean { return OUTPUT_TYPES.has(type); } -export function updateGroupRecursion(groupNode: NodeType, edges: Edge[]) { +export function updateGroupRecursion( + groupNode: NodeType, + edges: Edge[], + unavailableFields: + | { + [name: string]: string; + } + | undefined, + globalVariablesEntries: string[] | undefined, +) { + updateGlobalVariables( + groupNode.data.node, + unavailableFields, + globalVariablesEntries, + ); if (groupNode.data.node?.flow) { groupNode.data.node.flow.data!.nodes.forEach((node) => { if (node.data.node?.flow) { - updateGroupRecursion(node, node.data.node.flow.data!.edges); + updateGroupRecursion( + node, + node.data.node.flow.data!.edges, + unavailableFields, + globalVariablesEntries, + ); } }); let newFlow = groupNode.data.node!.flow; @@ -1503,6 +1522,41 @@ export function updateGroupRecursion(groupNode: NodeType, edges: Edge[]) { } } +export function updateGlobalVariables( + node: APIClassType | undefined, + unavailableFields: + | { + [name: string]: string; + } + | undefined, + globalVariablesEntries: string[] | undefined, +) { + if (node && node.template) { + Object.keys(node.template).forEach((field) => { + if ( + globalVariablesEntries && + node!.template[field].load_from_db && + !globalVariablesEntries.includes(node!.template[field].value) + ) { + node!.template[field].value = ""; + node!.template[field].load_from_db = false; + } + if ( + !node!.template[field].load_from_db && + node!.template[field].value === "" && + unavailableFields && + Object.keys(unavailableFields).includes( + node!.template[field].display_name ?? 
"", + ) + ) { + node!.template[field].value = + unavailableFields[node!.template[field].display_name ?? ""]; + node!.template[field].load_from_db = true; + } + }); + } +} + export function getGroupOutputNodeId( flow: FlowType, p_name: string,