Update .gitignore and refactor ChatInput and ChatOutput (#2140)

This pull request includes two changes. First, the `.gitignore` file is
updated to include `*.db-shm` and `*.db-wal` files. Second, the
`ChatInput` and `ChatOutput` classes are refactored to include a new
`return_message` parameter. This parameter allows the caller to specify
whether they want the message to be returned as a `Message` object or
just the message text. Together, these changes keep transient SQLite
journal files (`*.db-shm`, `*.db-wal`) out of version control and give
callers control over whether chat components return a `Message` object
or plain text.
This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-06-11 12:04:32 -07:00 committed by GitHub
commit 6716a90c0f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 886 additions and 251 deletions

6
.gitignore vendored
View file

@ -266,7 +266,5 @@ stuff/*
src/frontend/playwright-report/index.html
*.bak
prof/*
.langchain.db-shm
.langchain.db-wal
langflow.db-shm
langflow.db-wal
*.db-shm
*.db-wal

View file

@ -29,9 +29,9 @@ class ChatComponent(CustomComponent):
"info": "If provided, the message will be stored in the memory.",
"advanced": True,
},
"return_record": {
"display_name": "Return Record",
"info": "Return the message as a record containing the sender, sender_name, and session_id.",
"return_message": {
"display_name": "Return Message",
"info": "Return the message as a Message containing the sender, sender_name, and session_id.",
"advanced": True,
},
"record_template": {
@ -68,6 +68,7 @@ class ChatComponent(CustomComponent):
input_value: Optional[Union[str, Record, Message]] = None,
files: Optional[list[str]] = None,
session_id: Optional[str] = None,
return_message: Optional[bool] = False,
) -> Message:
message: Message | None = None
@ -78,7 +79,12 @@ class ChatComponent(CustomComponent):
message = Message(
text=input_value, sender=sender, sender_name=sender_name, files=files, session_id=session_id
)
self.status = message
if not return_message:
message_text = message.text
else:
message_text = message
self.status = message_text
if session_id and isinstance(message, Message) and isinstance(message.text, str):
self.store_message(message)
return message
return message_text

View file

@ -28,6 +28,7 @@ class ChatInput(ChatComponent):
input_value: Optional[str] = None,
files: Optional[list[str]] = None,
session_id: Optional[str] = None,
return_message: Optional[bool] = False,
) -> Union[Message, Text]:
return super().build_with_record(
sender=sender,
@ -35,4 +36,5 @@ class ChatInput(ChatComponent):
input_value=input_value,
files=files,
session_id=session_id,
return_message=return_message,
)

View file

@ -1,6 +1,7 @@
from typing import Optional
from typing import Optional, Union
from langflow.base.io.chat import ChatComponent
from langflow.field_typing import Text
from langflow.schema.message import Message
@ -16,11 +17,13 @@ class ChatOutput(ChatComponent):
input_value: Optional[str] = None,
session_id: Optional[str] = None,
files: Optional[list[str]] = None,
) -> Message:
return_message: Optional[bool] = False,
) -> Union[Message, Text]:
return super().build_with_record(
sender=sender,
sender_name=sender_name,
input_value=input_value,
session_id=session_id,
files=files,
return_message=return_message,
)

View file

@ -5,14 +5,20 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "Text", "str"],
"baseClasses": [
"object",
"Text",
"str"
],
"dataType": "OpenAIModel",
"id": "OpenAIModel-k39HS"
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-njtka",
"inputTypes": ["Text"],
"inputTypes": [
"Text"
],
"type": "str"
}
},
@ -29,14 +35,22 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "str", "Text"],
"baseClasses": [
"object",
"str",
"Text"
],
"dataType": "Prompt",
"id": "Prompt-uxBqP"
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-k39HS",
"inputTypes": ["Text", "Record", "Prompt"],
"inputTypes": [
"Text",
"Record",
"Prompt"
],
"type": "str"
}
},
@ -53,14 +67,24 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "Record", "str", "Text"],
"baseClasses": [
"object",
"Record",
"str",
"Text"
],
"dataType": "ChatInput",
"id": "ChatInput-P3fgL"
},
"targetHandle": {
"fieldName": "user_input",
"id": "Prompt-uxBqP",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -81,10 +105,16 @@
"display_name": "Prompt",
"id": "Prompt-uxBqP",
"node": {
"base_classes": ["object", "str", "Text"],
"base_classes": [
"object",
"str",
"Text"
],
"beta": false,
"custom_fields": {
"template": ["user_input"]
"template": [
"user_input"
]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
@ -99,7 +129,9 @@
"is_input": null,
"is_output": null,
"name": "",
"output_types": ["Prompt"],
"output_types": [
"Prompt"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -127,7 +159,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -191,7 +225,11 @@
"display_name": "OpenAI",
"id": "OpenAIModel-k39HS",
"node": {
"base_classes": ["object", "Text", "str"],
"base_classes": [
"object",
"Text",
"str"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -221,7 +259,9 @@
],
"frozen": false,
"icon": "OpenAI",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -249,7 +289,11 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text", "Record", "Prompt"],
"input_types": [
"Text",
"Record",
"Prompt"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -306,7 +350,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
@ -333,7 +379,9 @@
"fileTypes": [],
"file_path": "",
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -352,7 +400,9 @@
"fileTypes": [],
"file_path": "",
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": true,
"multiline": false,
@ -391,7 +441,9 @@
"fileTypes": [],
"file_path": "",
"info": "System message to pass to the model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -451,7 +503,12 @@
"data": {
"id": "ChatOutput-njtka",
"node": {
"base_classes": ["Record", "Text", "str", "object"],
"base_classes": [
"Record",
"Text",
"str",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -468,7 +525,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatOutput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -487,7 +547,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n )\n"
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -496,7 +556,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -515,12 +577,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -536,7 +603,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -556,7 +625,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -591,7 +662,12 @@
"data": {
"id": "ChatInput-P3fgL",
"node": {
"base_classes": ["object", "Record", "str", "Text"],
"base_classes": [
"object",
"Record",
"str",
"Text"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -607,7 +683,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatInput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -626,7 +705,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Text\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n )\n"
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import Text\nfrom typing import Union\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Text\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -655,12 +734,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -676,7 +760,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -696,7 +782,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -739,4 +827,4 @@
"is_component": false,
"last_tested_version": "1.0.0a4",
"name": "Basic Prompting (Hello, World)"
}
}

View file

@ -5,14 +5,21 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["Record"],
"baseClasses": [
"Record"
],
"dataType": "URL",
"id": "URL-HYPkR"
},
"targetHandle": {
"fieldName": "reference_2",
"id": "Prompt-Rse03",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -30,14 +37,20 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"baseClasses": [
"str",
"Text",
"object"
],
"dataType": "OpenAIModel",
"id": "OpenAIModel-gi29P"
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-JPlxl",
"inputTypes": ["Text"],
"inputTypes": [
"Text"
],
"type": "str"
}
},
@ -54,14 +67,21 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["Record"],
"baseClasses": [
"Record"
],
"dataType": "URL",
"id": "URL-2cX90"
},
"targetHandle": {
"fieldName": "reference_1",
"id": "Prompt-Rse03",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -78,14 +98,23 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "Text", "str"],
"baseClasses": [
"object",
"Text",
"str"
],
"dataType": "TextInput",
"id": "TextInput-og8Or"
},
"targetHandle": {
"fieldName": "instructions",
"id": "Prompt-Rse03",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -102,14 +131,22 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "Text", "str"],
"baseClasses": [
"object",
"Text",
"str"
],
"dataType": "Prompt",
"id": "Prompt-Rse03"
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-gi29P",
"inputTypes": ["Text", "Record", "Prompt"],
"inputTypes": [
"Text",
"Record",
"Prompt"
],
"type": "str"
}
},
@ -131,10 +168,18 @@
"display_name": "Prompt",
"id": "Prompt-Rse03",
"node": {
"base_classes": ["object", "Text", "str"],
"base_classes": [
"object",
"Text",
"str"
],
"beta": false,
"custom_fields": {
"template": ["reference_1", "reference_2", "instructions"]
"template": [
"reference_1",
"reference_2",
"instructions"
]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
@ -149,7 +194,9 @@
"is_input": null,
"is_output": null,
"name": "",
"output_types": ["Prompt"],
"output_types": [
"Prompt"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -255,7 +302,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -291,7 +340,9 @@
"data": {
"id": "URL-HYPkR",
"node": {
"base_classes": ["Record"],
"base_classes": [
"Record"
],
"beta": false,
"custom_fields": {
"urls": null
@ -303,7 +354,9 @@
"field_order": [],
"frozen": false,
"icon": "layout-template",
"output_types": ["Record"],
"output_types": [
"Record"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -331,7 +384,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
@ -369,7 +424,12 @@
"data": {
"id": "ChatOutput-JPlxl",
"node": {
"base_classes": ["Text", "Record", "object", "str"],
"base_classes": [
"Text",
"Record",
"object",
"str"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -386,7 +446,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatOutput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -405,7 +468,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n )\n"
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -414,7 +477,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -433,12 +498,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -454,7 +524,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -474,7 +546,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -504,7 +578,11 @@
"data": {
"id": "OpenAIModel-gi29P",
"node": {
"base_classes": ["str", "Text", "object"],
"base_classes": [
"str",
"Text",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -534,7 +612,9 @@
],
"frozen": false,
"icon": "OpenAI",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -562,7 +642,11 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text", "Record", "Prompt"],
"input_types": [
"Text",
"Record",
"Prompt"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -619,7 +703,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
@ -646,7 +732,9 @@
"fileTypes": [],
"file_path": "",
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -665,7 +753,9 @@
"fileTypes": [],
"file_path": "",
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": true,
"multiline": false,
@ -704,7 +794,9 @@
"fileTypes": [],
"file_path": "",
"info": "System message to pass to the model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -764,7 +856,9 @@
"data": {
"id": "URL-2cX90",
"node": {
"base_classes": ["Record"],
"base_classes": [
"Record"
],
"beta": false,
"custom_fields": {
"urls": null
@ -776,7 +870,9 @@
"field_order": [],
"frozen": false,
"icon": "layout-template",
"output_types": ["Record"],
"output_types": [
"Record"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -804,7 +900,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
@ -815,7 +913,9 @@
"show": true,
"title_case": false,
"type": "str",
"value": ["https://www.promptingguide.ai/introduction/basics"]
"value": [
"https://www.promptingguide.ai/introduction/basics"
]
}
}
},
@ -840,7 +940,11 @@
"data": {
"id": "TextInput-og8Or",
"node": {
"base_classes": ["object", "Text", "str"],
"base_classes": [
"object",
"Text",
"str"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -853,7 +957,9 @@
"field_order": [],
"frozen": false,
"icon": "type",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -881,7 +987,10 @@
"fileTypes": [],
"file_path": "",
"info": "Text or Record to be passed as input.",
"input_types": ["Record", "Text"],
"input_types": [
"Record",
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -901,7 +1010,9 @@
"fileTypes": [],
"file_path": "",
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -945,4 +1056,4 @@
"is_component": false,
"last_tested_version": "1.0.0a0",
"name": "Blog Writer"
}
}

View file

@ -5,14 +5,24 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "Record", "Text", "object"],
"baseClasses": [
"str",
"Record",
"Text",
"object"
],
"dataType": "ChatInput",
"id": "ChatInput-MsSJ9"
},
"targetHandle": {
"fieldName": "Question",
"id": "Prompt-tHwPf",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -29,14 +39,21 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["Record"],
"baseClasses": [
"Record"
],
"dataType": "File",
"id": "File-6TEsD"
},
"targetHandle": {
"fieldName": "Document",
"id": "Prompt-tHwPf",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -53,14 +70,22 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "str", "Text"],
"baseClasses": [
"object",
"str",
"Text"
],
"dataType": "Prompt",
"id": "Prompt-tHwPf"
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-Bt067",
"inputTypes": ["Text", "Record", "Prompt"],
"inputTypes": [
"Text",
"Record",
"Prompt"
],
"type": "str"
}
},
@ -77,14 +102,20 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "str", "Text"],
"baseClasses": [
"object",
"str",
"Text"
],
"dataType": "OpenAIModel",
"id": "OpenAIModel-Bt067"
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-F5Awj",
"inputTypes": ["Text"],
"inputTypes": [
"Text"
],
"type": "str"
}
},
@ -105,10 +136,17 @@
"display_name": "Prompt",
"id": "Prompt-tHwPf",
"node": {
"base_classes": ["object", "str", "Text"],
"base_classes": [
"object",
"str",
"Text"
],
"beta": false,
"custom_fields": {
"template": ["Document", "Question"]
"template": [
"Document",
"Question"
]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
@ -123,7 +161,9 @@
"is_input": null,
"is_output": null,
"name": "",
"output_types": ["Prompt"],
"output_types": [
"Prompt"
],
"template": {
"Document": {
"advanced": false,
@ -203,7 +243,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -239,7 +281,9 @@
"data": {
"id": "File-6TEsD",
"node": {
"base_classes": ["Record"],
"base_classes": [
"Record"
],
"beta": false,
"custom_fields": {
"path": null,
@ -251,7 +295,9 @@
"field_formatters": {},
"field_order": [],
"frozen": false,
"output_types": ["Record"],
"output_types": [
"Record"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -345,7 +391,12 @@
"data": {
"id": "ChatInput-MsSJ9",
"node": {
"base_classes": ["str", "Record", "Text", "object"],
"base_classes": [
"str",
"Record",
"Text",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -361,7 +412,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatInput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -380,7 +434,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Text\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n )\n"
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import Text\nfrom typing import Union\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Text\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -409,12 +463,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -430,7 +489,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -450,7 +511,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -485,7 +548,12 @@
"data": {
"id": "ChatOutput-F5Awj",
"node": {
"base_classes": ["str", "Record", "Text", "object"],
"base_classes": [
"str",
"Record",
"Text",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -501,7 +569,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatOutput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -520,7 +591,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n )\n"
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -529,7 +600,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -548,12 +621,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -569,7 +647,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -589,7 +669,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -624,7 +706,11 @@
"data": {
"id": "OpenAIModel-Bt067",
"node": {
"base_classes": ["object", "str", "Text"],
"base_classes": [
"object",
"str",
"Text"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -654,7 +740,9 @@
],
"frozen": false,
"icon": "OpenAI",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -682,7 +770,11 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text", "Record", "Prompt"],
"input_types": [
"Text",
"Record",
"Prompt"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -739,7 +831,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
@ -766,7 +860,9 @@
"fileTypes": [],
"file_path": "",
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -785,7 +881,9 @@
"fileTypes": [],
"file_path": "",
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": true,
"multiline": false,
@ -824,7 +922,9 @@
"fileTypes": [],
"file_path": "",
"info": "System message to pass to the model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -892,4 +992,4 @@
"is_component": false,
"last_tested_version": "1.0.0a0",
"name": "Document QA"
}
}

View file

@ -5,14 +5,23 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"baseClasses": [
"str",
"Text",
"object"
],
"dataType": "MemoryComponent",
"id": "MemoryComponent-cdA1J"
},
"targetHandle": {
"fieldName": "context",
"id": "Prompt-ODkUx",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -30,14 +39,24 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["Text", "object", "Record", "str"],
"baseClasses": [
"Text",
"object",
"Record",
"str"
],
"dataType": "ChatInput",
"id": "ChatInput-t7F8v"
},
"targetHandle": {
"fieldName": "user_message",
"id": "Prompt-ODkUx",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -55,14 +74,22 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["Text", "str", "object"],
"baseClasses": [
"Text",
"str",
"object"
],
"dataType": "Prompt",
"id": "Prompt-ODkUx"
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-9RykF",
"inputTypes": ["Text", "Record", "Prompt"],
"inputTypes": [
"Text",
"Record",
"Prompt"
],
"type": "str"
}
},
@ -79,14 +106,20 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "object", "Text"],
"baseClasses": [
"str",
"object",
"Text"
],
"dataType": "OpenAIModel",
"id": "OpenAIModel-9RykF"
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-P1jEe",
"inputTypes": ["Text"],
"inputTypes": [
"Text"
],
"type": "str"
}
},
@ -103,14 +136,21 @@
"className": "stroke-foreground stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"baseClasses": [
"str",
"Text",
"object"
],
"dataType": "MemoryComponent",
"id": "MemoryComponent-cdA1J"
},
"targetHandle": {
"fieldName": "input_value",
"id": "TextOutput-vrs6T",
"inputTypes": ["Record", "Text"],
"inputTypes": [
"Record",
"Text"
],
"type": "str"
}
},
@ -129,7 +169,12 @@
"data": {
"id": "ChatInput-t7F8v",
"node": {
"base_classes": ["Text", "object", "Record", "str"],
"base_classes": [
"Text",
"object",
"Record",
"str"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -145,7 +190,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatInput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -164,7 +212,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Text\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n )\n"
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import Text\nfrom typing import Union\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Text\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -193,12 +241,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -214,7 +267,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -234,7 +289,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -270,7 +327,12 @@
"data": {
"id": "ChatOutput-P1jEe",
"node": {
"base_classes": ["Text", "object", "Record", "str"],
"base_classes": [
"Text",
"object",
"Record",
"str"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -286,7 +348,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatOutput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -305,7 +370,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n )\n"
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -314,7 +379,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -333,12 +400,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -354,7 +426,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -374,7 +448,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -412,7 +488,11 @@
"display_name": "Chat Memory",
"id": "MemoryComponent-cdA1J",
"node": {
"base_classes": ["str", "Text", "object"],
"base_classes": [
"str",
"Text",
"object"
],
"beta": true,
"custom_fields": {
"n_messages": null,
@ -429,7 +509,9 @@
"field_order": [],
"frozen": false,
"icon": "history",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -476,12 +558,17 @@
"fileTypes": [],
"file_path": "",
"info": "Order of the messages.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "order",
"options": ["Ascending", "Descending"],
"options": [
"Ascending",
"Descending"
],
"password": false,
"placeholder": "",
"required": false,
@ -497,7 +584,9 @@
"fileTypes": [],
"file_path": "",
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -517,12 +606,18 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User", "Machine and User"],
"options": [
"Machine",
"User",
"Machine and User"
],
"password": false,
"placeholder": "",
"required": false,
@ -538,7 +633,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -557,7 +654,9 @@
"fileTypes": [],
"file_path": "",
"info": "Session ID of the chat history.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -595,10 +694,17 @@
"display_name": "Prompt",
"id": "Prompt-ODkUx",
"node": {
"base_classes": ["Text", "str", "object"],
"base_classes": [
"Text",
"str",
"object"
],
"beta": false,
"custom_fields": {
"template": ["context", "user_message"]
"template": [
"context",
"user_message"
]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
@ -613,7 +719,9 @@
"is_input": null,
"is_output": null,
"name": "",
"output_types": ["Prompt"],
"output_types": [
"Prompt"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -667,7 +775,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -729,7 +839,11 @@
"data": {
"id": "OpenAIModel-9RykF",
"node": {
"base_classes": ["str", "object", "Text"],
"base_classes": [
"str",
"object",
"Text"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -759,7 +873,9 @@
],
"frozen": false,
"icon": "OpenAI",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -787,7 +903,11 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text", "Record", "Prompt"],
"input_types": [
"Text",
"Record",
"Prompt"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -844,7 +964,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
@ -871,7 +993,9 @@
"fileTypes": [],
"file_path": "",
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -890,7 +1014,9 @@
"fileTypes": [],
"file_path": "",
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": true,
"multiline": false,
@ -929,7 +1055,9 @@
"fileTypes": [],
"file_path": "",
"info": "System message to pass to the model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -989,7 +1117,11 @@
"data": {
"id": "TextOutput-vrs6T",
"node": {
"base_classes": ["str", "object", "Text"],
"base_classes": [
"str",
"object",
"Text"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -1002,7 +1134,9 @@
"field_order": [],
"frozen": false,
"icon": "type",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -1030,7 +1164,10 @@
"fileTypes": [],
"file_path": "",
"info": "Text or Record to be passed as output.",
"input_types": ["Record", "Text"],
"input_types": [
"Record",
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -1050,7 +1187,9 @@
"fileTypes": [],
"file_path": "",
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1096,4 +1235,4 @@
"is_component": false,
"last_tested_version": "1.0.0a0",
"name": "Memory Chatbot"
}
}

View file

@ -5,14 +5,23 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"baseClasses": [
"str",
"Text",
"object"
],
"dataType": "TextInput",
"id": "TextInput-sptaH"
},
"targetHandle": {
"fieldName": "document",
"id": "Prompt-amqBu",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -29,14 +38,21 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "str", "Text"],
"baseClasses": [
"object",
"str",
"Text"
],
"dataType": "Prompt",
"id": "Prompt-amqBu"
},
"targetHandle": {
"fieldName": "input_value",
"id": "TextOutput-2MS4a",
"inputTypes": ["Record", "Text"],
"inputTypes": [
"Record",
"Text"
],
"type": "str"
}
},
@ -53,14 +69,22 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "str", "Text"],
"baseClasses": [
"object",
"str",
"Text"
],
"dataType": "Prompt",
"id": "Prompt-amqBu"
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-uYXZJ",
"inputTypes": ["Text", "Record", "Prompt"],
"inputTypes": [
"Text",
"Record",
"Prompt"
],
"type": "str"
}
},
@ -77,14 +101,23 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"baseClasses": [
"str",
"Text",
"object"
],
"dataType": "OpenAIModel",
"id": "OpenAIModel-uYXZJ"
},
"targetHandle": {
"fieldName": "summary",
"id": "Prompt-gTNiz",
"inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
"inputTypes": [
"Document",
"BaseOutputParser",
"Record",
"Text"
],
"type": "str"
}
},
@ -101,14 +134,20 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"baseClasses": [
"str",
"Text",
"object"
],
"dataType": "OpenAIModel",
"id": "OpenAIModel-uYXZJ"
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-EJkG3",
"inputTypes": ["Text"],
"inputTypes": [
"Text"
],
"type": "str"
}
},
@ -125,14 +164,21 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "str", "Text"],
"baseClasses": [
"object",
"str",
"Text"
],
"dataType": "Prompt",
"id": "Prompt-gTNiz"
},
"targetHandle": {
"fieldName": "input_value",
"id": "TextOutput-MUDOR",
"inputTypes": ["Record", "Text"],
"inputTypes": [
"Record",
"Text"
],
"type": "str"
}
},
@ -149,14 +195,22 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["object", "str", "Text"],
"baseClasses": [
"object",
"str",
"Text"
],
"dataType": "Prompt",
"id": "Prompt-gTNiz"
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-XawYB",
"inputTypes": ["Text", "Record", "Prompt"],
"inputTypes": [
"Text",
"Record",
"Prompt"
],
"type": "str"
}
},
@ -173,14 +227,20 @@
"className": "stroke-gray-900 stroke-connection",
"data": {
"sourceHandle": {
"baseClasses": ["str", "Text", "object"],
"baseClasses": [
"str",
"Text",
"object"
],
"dataType": "OpenAIModel",
"id": "OpenAIModel-XawYB"
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-DNmvg",
"inputTypes": ["Text"],
"inputTypes": [
"Text"
],
"type": "str"
}
},
@ -201,10 +261,16 @@
"display_name": "Prompt",
"id": "Prompt-amqBu",
"node": {
"base_classes": ["object", "str", "Text"],
"base_classes": [
"object",
"str",
"Text"
],
"beta": false,
"custom_fields": {
"template": ["document"]
"template": [
"document"
]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
@ -219,7 +285,9 @@
"is_input": null,
"is_output": null,
"name": "",
"output_types": ["Prompt"],
"output_types": [
"Prompt"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -273,7 +341,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -311,10 +381,16 @@
"display_name": "Prompt",
"id": "Prompt-gTNiz",
"node": {
"base_classes": ["object", "str", "Text"],
"base_classes": [
"object",
"str",
"Text"
],
"beta": false,
"custom_fields": {
"template": ["summary"]
"template": [
"summary"
]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
@ -329,7 +405,9 @@
"is_input": null,
"is_output": null,
"name": "",
"output_types": ["Prompt"],
"output_types": [
"Prompt"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -383,7 +461,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -415,7 +495,12 @@
"data": {
"id": "ChatOutput-EJkG3",
"node": {
"base_classes": ["object", "Record", "Text", "str"],
"base_classes": [
"object",
"Record",
"Text",
"str"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -432,7 +517,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatOutput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -451,7 +539,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n )\n"
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -460,7 +548,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -479,12 +569,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -500,7 +595,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -520,7 +617,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -551,7 +650,12 @@
"data": {
"id": "ChatOutput-DNmvg",
"node": {
"base_classes": ["object", "Record", "Text", "str"],
"base_classes": [
"object",
"Record",
"Text",
"str"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -568,7 +672,10 @@
"field_order": [],
"frozen": false,
"icon": "ChatOutput",
"output_types": ["Message"],
"output_types": [
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -587,7 +694,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n )\n"
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -596,7 +703,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -615,12 +724,17 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
"name": "sender",
"options": ["Machine", "User"],
"options": [
"Machine",
"User"
],
"password": false,
"placeholder": "",
"required": false,
@ -636,7 +750,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -656,7 +772,9 @@
"fileTypes": [],
"file_path": "",
"info": "If provided, the message will be stored in the memory.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -686,7 +804,11 @@
"data": {
"id": "TextInput-sptaH",
"node": {
"base_classes": ["str", "Text", "object"],
"base_classes": [
"str",
"Text",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -699,7 +821,9 @@
"field_order": [],
"frozen": false,
"icon": "type",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -727,7 +851,10 @@
"fileTypes": [],
"file_path": "",
"info": "Text or Record to be passed as input.",
"input_types": ["Record", "Text"],
"input_types": [
"Record",
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -747,7 +874,9 @@
"fileTypes": [],
"file_path": "",
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -783,7 +912,11 @@
"data": {
"id": "TextOutput-2MS4a",
"node": {
"base_classes": ["str", "Text", "object"],
"base_classes": [
"str",
"Text",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -796,7 +929,9 @@
"field_order": [],
"frozen": false,
"icon": "type",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -824,7 +959,10 @@
"fileTypes": [],
"file_path": "",
"info": "Text or Record to be passed as output.",
"input_types": ["Record", "Text"],
"input_types": [
"Record",
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -844,7 +982,9 @@
"fileTypes": [],
"file_path": "",
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -880,7 +1020,11 @@
"data": {
"id": "OpenAIModel-uYXZJ",
"node": {
"base_classes": ["str", "Text", "object"],
"base_classes": [
"str",
"Text",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -910,7 +1054,9 @@
],
"frozen": false,
"icon": "OpenAI",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -938,7 +1084,11 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text", "Record", "Prompt"],
"input_types": [
"Text",
"Record",
"Prompt"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -995,7 +1145,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
@ -1022,7 +1174,9 @@
"fileTypes": [],
"file_path": "",
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -1041,7 +1195,9 @@
"fileTypes": [],
"file_path": "",
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": true,
"multiline": false,
@ -1080,7 +1236,9 @@
"fileTypes": [],
"file_path": "",
"info": "System message to pass to the model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -1140,7 +1298,11 @@
"data": {
"id": "TextOutput-MUDOR",
"node": {
"base_classes": ["str", "Text", "object"],
"base_classes": [
"str",
"Text",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -1153,7 +1315,9 @@
"field_order": [],
"frozen": false,
"icon": "type",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -1181,7 +1345,10 @@
"fileTypes": [],
"file_path": "",
"info": "Text or Record to be passed as output.",
"input_types": ["Record", "Text"],
"input_types": [
"Record",
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -1201,7 +1368,9 @@
"fileTypes": [],
"file_path": "",
"info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1237,7 +1406,11 @@
"data": {
"id": "OpenAIModel-XawYB",
"node": {
"base_classes": ["str", "Text", "object"],
"base_classes": [
"str",
"Text",
"object"
],
"beta": false,
"custom_fields": {
"input_value": null,
@ -1267,7 +1440,9 @@
],
"frozen": false,
"icon": "OpenAI",
"output_types": ["Text"],
"output_types": [
"Text"
],
"template": {
"_type": "CustomComponent",
"code": {
@ -1295,7 +1470,11 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text", "Record", "Prompt"],
"input_types": [
"Text",
"Record",
"Prompt"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -1352,7 +1531,9 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": true,
"load_from_db": false,
"multiline": false,
@ -1379,7 +1560,9 @@
"fileTypes": [],
"file_path": "",
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -1398,7 +1581,9 @@
"fileTypes": [],
"file_path": "",
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -1437,7 +1622,9 @@
"fileTypes": [],
"file_path": "",
"info": "System message to pass to the model.",
"input_types": ["Text"],
"input_types": [
"Text"
],
"list": false,
"load_from_db": false,
"multiline": false,
@ -1505,4 +1692,4 @@
"is_component": false,
"last_tested_version": "1.0.0a0",
"name": "Prompt Chaining"
}
}

View file

@ -332,7 +332,8 @@
"frozen": false,
"icon": "ChatInput",
"output_types": [
"Message"
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
@ -352,7 +353,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Text\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n )\n"
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import Text\nfrom typing import Union\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Text\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,
@ -1530,7 +1531,8 @@
"frozen": false,
"icon": "ChatOutput",
"output_types": [
"Message"
"Message",
"Text"
],
"template": {
"_type": "CustomComponent",
@ -1550,7 +1552,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from typing import Optional\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n ) -> Message:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n )\n"
"value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n files: Optional[list[str]] = None,\n return_message: Optional[bool] = False,\n ) -> Union[Message, Text]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n files=files,\n return_message=return_message,\n )\n"
},
"input_value": {
"advanced": false,

View file

@ -4,7 +4,6 @@ from uuid import UUID, uuid4
import pytest
from fastapi import status
from fastapi.testclient import TestClient
from langflow.custom.directory_reader.directory_reader import DirectoryReader
from langflow.services.deps import get_settings_service
@ -448,9 +447,9 @@ def test_successful_run_no_payload(client, starter_project, created_api_key):
assert all(["ChatOutput" in _id for _id in ids])
display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")]
assert all([name in display_names for name in ["Chat Output"]])
inner_results = [output.get("results").get("text") for output in outputs_dict.get("outputs")]
inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")]
assert all([result is not None for result in inner_results]), inner_results
assert all([result is not None for result in inner_results]), outputs_dict.get("outputs")
def test_successful_run_with_output_type_text(client, starter_project, created_api_key):
@ -478,7 +477,7 @@ def test_successful_run_with_output_type_text(client, starter_project, created_a
assert all(["ChatOutput" in _id for _id in ids]), ids
display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")]
assert all([name in display_names for name in ["Chat Output"]]), display_names
inner_results = [output.get("results").get("text") for output in outputs_dict.get("outputs")]
inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")]
expected_result = ""
assert all([expected_result in result for result in inner_results]), inner_results
@ -509,7 +508,7 @@ def test_successful_run_with_output_type_any(client, starter_project, created_ap
assert all(["ChatOutput" in _id or "TextOutput" in _id for _id in ids]), ids
display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")]
assert all([name in display_names for name in ["Chat Output"]]), display_names
inner_results = [output.get("results").get("text") for output in outputs_dict.get("outputs")]
inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")]
expected_result = ""
assert all([expected_result in result for result in inner_results]), inner_results
@ -567,7 +566,7 @@ def test_successful_run_with_input_type_text(client, starter_project, created_ap
text_input_outputs = [output for output in outputs_dict.get("outputs") if "TextInput" in output.get("component_id")]
assert len(text_input_outputs) == 0
# Now we check if the input_value is correct
assert all([output.get("results").get("text") == "value1" for output in text_input_outputs]), text_input_outputs
assert all([output.get("results").get("result") == "value1" for output in text_input_outputs]), text_input_outputs
# Now do the same for "chat" input type
@ -598,7 +597,7 @@ def test_successful_run_with_input_type_chat(client, starter_project, created_ap
chat_input_outputs = [output for output in outputs_dict.get("outputs") if "ChatInput" in output.get("component_id")]
assert len(chat_input_outputs) == 1
# Now we check if the input_value is correct
assert all([output.get("results").get("text") == "value1" for output in chat_input_outputs]), chat_input_outputs
assert all([output.get("results").get("result") == "value1" for output in chat_input_outputs]), chat_input_outputs
def test_successful_run_with_input_type_any(client, starter_project, created_api_key):
@ -632,7 +631,7 @@ def test_successful_run_with_input_type_any(client, starter_project, created_api
]
assert len(any_input_outputs) == 1
# Now we check if the input_value is correct
assert all([output.get("results").get("text") == "value1" for output in any_input_outputs]), any_input_outputs
assert all([output.get("results").get("result") == "value1" for output in any_input_outputs]), any_input_outputs
@pytest.mark.api_key_required