Update AstraDB component descriptions

This commit is contained in:
Rodrigo Nader 2024-03-30 14:51:06 -03:00
commit 775fa87f92
3 changed files with 888 additions and 3 deletions

View file

@@ -8,7 +8,7 @@ from langflow.schema import Record
class AstraDBSearchComponent(LCVectorStoreComponent):
display_name = "AstraDB Search"
description = "Searches an existing AstraDB Vector Store"
description = "Searches an existing AstraDB Vector Store."
icon = "AstraDB"
field_order = ["token", "api_endpoint", "collection_name", "input_value", "embedding"]

View file

@@ -9,8 +9,8 @@ from langflow.schema import Record
class AstraDBVectorStoreComponent(CustomComponent):
display_name = "AstraDB Vector Store"
description = "Builds or loads an AstraDB Vector Store"
display_name = "AstraDB"
description = "Builds or loads an AstraDB Vector Store."
icon = "AstraDB"
field_order = ["token", "api_endpoint", "collection_name", "inputs", "embedding"]

View file

@@ -0,0 +1,885 @@
{
"id": "de0eada6-efc0-49bc-830d-a64f87859eac",
"data": {
"nodes": [
{
"id": "Prompt-vbqLJ",
"type": "genericNode",
"position": { "x": 585.7906101139403, "y": 117.52115876762832 },
"data": {
"type": "Prompt",
"node": {
"template": {
"code": {
"type": "code",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"A component for creating prompt templates using dynamic variables.\"\n icon = \"terminal-square\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "code",
"advanced": true,
"dynamic": true,
"info": "",
"load_from_db": false,
"title_case": false
},
"template": {
"type": "prompt",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": "Answer user's questions based on the document below:\n\n---\n\n{document}\n\n---\n\nQuestion:\n{question}\n\nAnswer:\n",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "template",
"display_name": "Template",
"advanced": false,
"input_types": ["Text"],
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false
},
"_type": "CustomComponent",
"document": {
"field_type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"value": "",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "document",
"display_name": "document",
"advanced": false,
"input_types": [
"Document",
"BaseOutputParser",
"Text",
"Record"
],
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"type": "str"
},
"question": {
"field_type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"value": "",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "question",
"display_name": "question",
"advanced": false,
"input_types": [
"Document",
"BaseOutputParser",
"Text",
"Record"
],
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"type": "str"
}
},
"description": "A component for creating prompt templates using dynamic variables.",
"icon": "terminal-square",
"is_input": null,
"is_output": null,
"is_composition": null,
"base_classes": ["str", "Text", "object"],
"name": "",
"display_name": "Prompt",
"documentation": "",
"custom_fields": { "template": ["document", "question"] },
"output_types": ["Text"],
"full_path": null,
"field_formatters": {},
"frozen": false,
"field_order": [],
"beta": false,
"error": null
},
"id": "Prompt-vbqLJ",
"description": "A component for creating prompt templates using dynamic variables.",
"display_name": "Prompt"
},
"selected": false,
"width": 384,
"height": 513,
"positionAbsolute": { "x": 585.7906101139403, "y": 117.52115876762832 },
"dragging": false
},
{
"id": "File-raM7G",
"type": "genericNode",
"position": { "x": -10.56827221474498, "y": -18.235777540136695 },
"data": {
"type": "File",
"node": {
"template": {
"path": {
"type": "file",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"fileTypes": [
".txt",
".md",
".mdx",
".csv",
".json",
".yaml",
".yml",
".xml",
".html",
".htm",
".pdf",
".docx"
],
"file_path": "de0eada6-efc0-49bc-830d-a64f87859eac/The Egg.pdf",
"password": false,
"name": "path",
"display_name": "Path",
"advanced": false,
"dynamic": false,
"info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx",
"load_from_db": false,
"title_case": false,
"value": ""
},
"code": {
"type": "code",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"Files\"\n description = \"A generic file loader.\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "code",
"advanced": true,
"dynamic": true,
"info": "",
"load_from_db": false,
"title_case": false
},
"silent_errors": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": false,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "silent_errors",
"display_name": "Silent Errors",
"advanced": true,
"dynamic": false,
"info": "If true, errors will not raise an exception.",
"load_from_db": false,
"title_case": false
},
"_type": "CustomComponent"
},
"description": "A generic file loader.",
"base_classes": ["Record"],
"display_name": "Files",
"documentation": "",
"custom_fields": { "path": null, "silent_errors": null },
"output_types": ["Record"],
"field_formatters": {},
"frozen": false,
"field_order": [],
"beta": false
},
"id": "File-raM7G"
},
"selected": false,
"width": 384,
"height": 281,
"positionAbsolute": {
"x": -10.56827221474498,
"y": -18.235777540136695
},
"dragging": false
},
{
"id": "ChatInput-i8h9h",
"type": "genericNode",
"position": { "x": -20.8061754985539, "y": 392.20173628530654 },
"data": {
"type": "ChatInput",
"node": {
"template": {
"code": {
"type": "code",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Capture user inputs from the chat interface.\"\n icon = \"ChatInput\"\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "code",
"advanced": true,
"dynamic": true,
"info": "",
"load_from_db": false,
"title_case": false
},
"input_value": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "input_value",
"display_name": "Message",
"advanced": false,
"input_types": ["Text"],
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"value": "what is the egg?\n"
},
"return_record": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": false,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "return_record",
"display_name": "Return Record",
"advanced": true,
"dynamic": false,
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
"load_from_db": false,
"title_case": false
},
"sender": {
"type": "str",
"required": false,
"placeholder": "",
"list": true,
"show": true,
"multiline": false,
"value": "User",
"fileTypes": [],
"file_path": "",
"password": false,
"options": ["Machine", "User"],
"name": "sender",
"display_name": "Sender Type",
"advanced": true,
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"sender_name": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": "User",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "sender_name",
"display_name": "Sender Name",
"advanced": false,
"dynamic": false,
"info": "",
"load_from_db": true,
"title_case": false,
"input_types": ["Text"]
},
"session_id": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "session_id",
"display_name": "Session ID",
"advanced": true,
"dynamic": false,
"info": "If provided, the message will be stored in the memory.",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"_type": "CustomComponent"
},
"description": "Capture user inputs from the chat interface.",
"icon": "ChatInput",
"base_classes": ["str", "Record", "Text", "object"],
"display_name": "Chat Input",
"documentation": "",
"custom_fields": {
"sender": null,
"sender_name": null,
"input_value": null,
"session_id": null,
"return_record": null
},
"output_types": ["Text", "Record"],
"field_formatters": {},
"frozen": false,
"field_order": [],
"beta": false
},
"id": "ChatInput-i8h9h"
},
"selected": false,
"width": 384,
"height": 399,
"positionAbsolute": { "x": -20.8061754985539, "y": 392.20173628530654 },
"dragging": false
},
{
"id": "OpenAIModel-4sAGl",
"type": "genericNode",
"position": { "x": 1149.435560286922, "y": -149.64066009487115 },
"data": {
"type": "OpenAIModel",
"node": {
"template": {
"input_value": {
"type": "str",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "input_value",
"display_name": "Input",
"advanced": false,
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"code": {
"type": "code",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI's models.\"\n icon = \"OpenAI\"\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": False,\n \"required\": False,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n \"required\": False,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"required\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": False,\n \"required\": False,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"advanced\": False,\n \"required\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"required\": False,\n \"value\": 0.7,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": \"Stream the response from the model.\",\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n },\n }\n\n def build(\n self,\n input_value: Text,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n model_name: str = \"gpt-4-1106-preview\",\n openai_api_base: Optional[str] = None,\n openai_api_key: Optional[str] = None,\n temperature: float = 0.7,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "code",
"advanced": true,
"dynamic": true,
"info": "",
"load_from_db": false,
"title_case": false
},
"max_tokens": {
"type": "int",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": 256,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "max_tokens",
"display_name": "Max Tokens",
"advanced": false,
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false
},
"model_kwargs": {
"type": "NestedDict",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": {},
"fileTypes": [],
"file_path": "",
"password": false,
"name": "model_kwargs",
"display_name": "Model Kwargs",
"advanced": true,
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false
},
"model_name": {
"type": "str",
"required": false,
"placeholder": "",
"list": true,
"show": true,
"multiline": false,
"value": "gpt-4-1106-preview",
"fileTypes": [],
"file_path": "",
"password": false,
"options": [
"gpt-4-turbo-preview",
"gpt-4-0125-preview",
"gpt-4-1106-preview",
"gpt-4-vision-preview",
"gpt-3.5-turbo-0125",
"gpt-3.5-turbo-1106"
],
"name": "model_name",
"display_name": "Model Name",
"advanced": false,
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"openai_api_base": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "openai_api_base",
"display_name": "OpenAI API Base",
"advanced": false,
"dynamic": false,
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"openai_api_key": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"fileTypes": [],
"file_path": "",
"password": true,
"name": "openai_api_key",
"display_name": "OpenAI API Key",
"advanced": false,
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"stream": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": true,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "stream",
"display_name": "Stream",
"advanced": false,
"dynamic": false,
"info": "Stream the response from the model.",
"load_from_db": false,
"title_case": false
},
"system_message": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "system_message",
"display_name": "System Message",
"advanced": false,
"dynamic": false,
"info": "System message to pass to the model.",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"temperature": {
"type": "float",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": "0",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "temperature",
"display_name": "Temperature",
"advanced": false,
"dynamic": false,
"info": "",
"rangeSpec": { "min": -1, "max": 1, "step": 0.1 },
"load_from_db": false,
"title_case": false
},
"_type": "CustomComponent"
},
"description": "Generates text using OpenAI's models.",
"icon": "OpenAI",
"base_classes": ["str", "Text", "object"],
"display_name": "OpenAI",
"documentation": "",
"custom_fields": {
"input_value": null,
"max_tokens": null,
"model_kwargs": null,
"model_name": null,
"openai_api_base": null,
"openai_api_key": null,
"temperature": null,
"stream": null,
"system_message": null
},
"output_types": ["Text"],
"field_formatters": {},
"frozen": false,
"field_order": [],
"beta": false
},
"id": "OpenAIModel-4sAGl"
},
"selected": false,
"width": 384,
"height": 945,
"positionAbsolute": {
"x": 1149.435560286922,
"y": -149.64066009487115
},
"dragging": false
},
{
"id": "ChatOutput-gQm1o",
"type": "genericNode",
"position": { "x": 1733.3012915204283, "y": 168.76098809939327 },
"data": {
"type": "ChatOutput",
"node": {
"template": {
"code": {
"type": "code",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Used to send a chat message.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "code",
"advanced": true,
"dynamic": true,
"info": "",
"load_from_db": false,
"title_case": false
},
"input_value": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "input_value",
"display_name": "Message",
"advanced": false,
"input_types": ["Text"],
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false
},
"return_record": {
"type": "bool",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": false,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "return_record",
"display_name": "Return Record",
"advanced": true,
"dynamic": false,
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
"load_from_db": false,
"title_case": false
},
"sender": {
"type": "str",
"required": false,
"placeholder": "",
"list": true,
"show": true,
"multiline": false,
"value": "Machine",
"fileTypes": [],
"file_path": "",
"password": false,
"options": ["Machine", "User"],
"name": "sender",
"display_name": "Sender Type",
"advanced": true,
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"sender_name": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"value": "AI",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "sender_name",
"display_name": "Sender Name",
"advanced": false,
"dynamic": false,
"info": "",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"session_id": {
"type": "str",
"required": false,
"placeholder": "",
"list": false,
"show": true,
"multiline": false,
"fileTypes": [],
"file_path": "",
"password": false,
"name": "session_id",
"display_name": "Session ID",
"advanced": true,
"dynamic": false,
"info": "If provided, the message will be stored in the memory.",
"load_from_db": false,
"title_case": false,
"input_types": ["Text"]
},
"_type": "CustomComponent"
},
"description": "Used to send a chat message.",
"icon": "ChatOutput",
"base_classes": ["str", "Record", "Text", "object"],
"display_name": "Chat Output",
"documentation": "",
"custom_fields": {
"sender": null,
"sender_name": null,
"input_value": null,
"session_id": null,
"return_record": null
},
"output_types": ["Text", "Record"],
"field_formatters": {},
"frozen": false,
"field_order": [],
"beta": false
},
"id": "ChatOutput-gQm1o"
},
"selected": false,
"width": 384,
"height": 399,
"positionAbsolute": {
"x": 1733.3012915204283,
"y": 168.76098809939327
},
"dragging": false
}
],
"edges": [
{
"source": "File-raM7G",
"sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-raM7Gœ}",
"target": "Prompt-vbqLJ",
"targetHandle": "{œfieldNameœ:œdocumentœ,œidœ:œPrompt-vbqLJœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}",
"data": {
"targetHandle": {
"fieldName": "document",
"id": "Prompt-vbqLJ",
"inputTypes": ["Document", "BaseOutputParser", "Text", "Record"],
"type": "str"
},
"sourceHandle": {
"baseClasses": ["Record"],
"dataType": "File",
"id": "File-raM7G"
}
},
"style": { "stroke": "#555" },
"className": "stroke-gray-900 stroke-connection",
"id": "reactflow__edge-File-raM7G{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-raM7Gœ}-Prompt-vbqLJ{œfieldNameœ:œdocumentœ,œidœ:œPrompt-vbqLJœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}"
},
{
"source": "ChatInput-i8h9h",
"sourceHandle": "{œbaseClassesœ:[œstrœ,œRecordœ,œTextœ,œobjectœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-i8h9hœ}",
"target": "Prompt-vbqLJ",
"targetHandle": "{œfieldNameœ:œquestionœ,œidœ:œPrompt-vbqLJœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}",
"data": {
"targetHandle": {
"fieldName": "question",
"id": "Prompt-vbqLJ",
"inputTypes": ["Document", "BaseOutputParser", "Text", "Record"],
"type": "str"
},
"sourceHandle": {
"baseClasses": ["str", "Record", "Text", "object"],
"dataType": "ChatInput",
"id": "ChatInput-i8h9h"
}
},
"style": { "stroke": "#555" },
"className": "stroke-gray-900 stroke-connection",
"id": "reactflow__edge-ChatInput-i8h9h{œbaseClassesœ:[œstrœ,œRecordœ,œTextœ,œobjectœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-i8h9hœ}-Prompt-vbqLJ{œfieldNameœ:œquestionœ,œidœ:œPrompt-vbqLJœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}"
},
{
"source": "Prompt-vbqLJ",
"sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-vbqLJœ}",
"target": "OpenAIModel-4sAGl",
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-4sAGlœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
"data": {
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-4sAGl",
"inputTypes": ["Text"],
"type": "str"
},
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"dataType": "Prompt",
"id": "Prompt-vbqLJ"
}
},
"style": { "stroke": "#555" },
"className": "stroke-gray-900 stroke-connection",
"id": "reactflow__edge-Prompt-vbqLJ{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-vbqLJœ}-OpenAIModel-4sAGl{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-4sAGlœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}"
},
{
"source": "OpenAIModel-4sAGl",
"sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-4sAGlœ}",
"target": "ChatOutput-gQm1o",
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-gQm1oœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
"data": {
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-gQm1o",
"inputTypes": ["Text"],
"type": "str"
},
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"dataType": "OpenAIModel",
"id": "OpenAIModel-4sAGl"
}
},
"style": { "stroke": "#555" },
"className": "stroke-gray-900 stroke-connection",
"id": "reactflow__edge-OpenAIModel-4sAGl{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-4sAGlœ}-ChatOutput-gQm1o{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-gQm1oœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}"
}
],
"viewport": {
"x": 253.88557335378164,
"y": 258.9093666453358,
"zoom": 0.3604176801056518
}
},
"description": "This flow integrates PDF reading with a language model to answer document-specific questions. Ideal for small-scale texts, it facilitates direct queries with immediate insights.",
"name": "Document QA",
"last_tested_version": "1.0.0a0",
"is_component": false
}