Update inputs now that extras are forbidden

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-06-19 18:53:02 -03:00
commit a9f6142820
20 changed files with 53 additions and 61 deletions

View file

@ -1,7 +1,7 @@
import json
from langflow.custom import Component
from langflow.io import Output, TextInput
from langflow.io import MultilineInput, Output
from langflow.schema import Data
@ -10,11 +10,10 @@ class WebhookComponent(Component):
description = "Defines a webhook input for the flow."
inputs = [
TextInput(
MultilineInput(
name="data",
display_name="Data",
info="Use this field to quickly test the webhook component by providing a JSON payload.",
multiline=True,
)
]
outputs = [

View file

@ -1,7 +1,5 @@
from typing import Any, List, Optional
from loguru import logger
from langflow.base.flow_processing.utils import build_data_from_result_data
from langflow.custom import CustomComponent
from langflow.graph.graph.base import Graph
@ -11,6 +9,7 @@ from langflow.helpers.flow import get_flow_inputs
from langflow.schema import Data
from langflow.schema.dotdict import dotdict
from langflow.template.field.base import Input
from loguru import logger
class SubFlowComponent(CustomComponent):
@ -63,7 +62,7 @@ class SubFlowComponent(CustomComponent):
name=vertex.id,
info=vertex.description,
field_type="str",
default=None,
value=None,
)
new_fields.append(field)
logger.debug(new_fields)

View file

@ -23,7 +23,7 @@ class CombineTextComponent(Component):
name="delimiter",
display_name="Delimiter",
info="A string used to separate the two text inputs. Defaults to a whitespace.",
default=" ",
value=" ",
),
]

View file

@ -1,6 +1,6 @@
from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES
from langflow.base.io.chat import ChatComponent
from langflow.io import DropdownInput, FileInput, Output, TextInput
from langflow.io import DropdownInput, FileInput, MultilineInput, Output, TextInput
from langflow.schema.message import Message
@ -10,10 +10,9 @@ class ChatInput(ChatComponent):
icon = "ChatInput"
inputs = [
TextInput(
MultilineInput(
name="input_value",
display_name="Text",
multiline=True,
value="",
info="Message to be passed as input.",
),
@ -27,15 +26,12 @@ class ChatInput(ChatComponent):
),
TextInput(
name="sender_name",
type=str,
display_name="Sender Name",
info="Name of the sender.",
value="User",
advanced=True,
),
TextInput(
name="session_id", type=str, display_name="Session ID", info="Session ID for the message.", advanced=True
),
TextInput(name="session_id", display_name="Session ID", info="Session ID for the message.", advanced=True),
FileInput(
name="files",
display_name="Files",

View file

@ -1,7 +1,6 @@
from typing import Optional
from langchain_community.chat_models.vertexai import ChatVertexAI
from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
@ -16,7 +15,7 @@ class ChatVertexAIComponent(CustomComponent):
"credentials": {
"display_name": "Credentials",
"field_type": "file",
"file_types": [".json"],
"file_types": ["json"],
"file_path": None,
},
"examples": {

View file

@ -1,7 +1,6 @@
from typing import Dict, Optional
from langchain_community.llms.vertexai import VertexAI
from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
@ -16,7 +15,7 @@ class VertexAIComponent(CustomComponent):
"credentials": {
"display_name": "Credentials",
"field_type": "file",
"file_types": [".json"],
"file_types": ["json"],
"required": False,
"value": None,
},

View file

@ -4,7 +4,7 @@ from pydantic.v1 import SecretStr
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import BaseLanguageModel, Text
from langflow.io import BoolInput, DropdownInput, FloatInput, IntInput, MessageInput, Output, StrInput
from langflow.io import BoolInput, DropdownInput, FloatInput, IntInput, MessageInput, Output, StrInput, SecretStrInput
class AzureChatOpenAIComponent(LCModelComponent):
@ -56,7 +56,7 @@ class AzureChatOpenAIComponent(LCModelComponent):
value=AZURE_OPENAI_API_VERSIONS[-1],
advanced=True,
),
StrInput(name="api_key", display_name="API Key", password=True),
SecretStrInput(name="api_key", display_name="API Key", password=True),
FloatInput(name="temperature", display_name="Temperature", value=0.7),
IntInput(
name="max_tokens",

View file

@ -4,7 +4,7 @@ from pydantic.v1 import SecretStr
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import BaseLanguageModel, Text
from langflow.io import BoolInput, FloatInput, Output, SecretStrInput, StrInput
from langflow.io import BoolInput, FloatInput, Output, SecretStrInput, StrInput, DropdownInput
class QianfanChatEndpointComponent(LCModelComponent):
@ -19,7 +19,7 @@ class QianfanChatEndpointComponent(LCModelComponent):
display_name="Input",
input_types=["Text", "Data", "Prompt"],
),
StrInput(
DropdownInput(
name="model",
display_name="Model Name",
options=[

View file

@ -1,11 +1,20 @@
from typing import Optional
from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import BaseLanguageModel
from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageInput, Output, StrInput
from langflow.io import (
BoolInput,
DictInput,
DropdownInput,
FloatInput,
IntInput,
MessageInput,
Output,
SecretStrInput,
StrInput,
)
from langflow.schema.message import Message
@ -24,12 +33,11 @@ class ChatLiteLLMModelComponent(LCModelComponent):
required=True,
info="The name of the model to use. For example, `gpt-3.5-turbo`.",
),
StrInput(
SecretStrInput(
name="api_key",
display_name="API key",
advanced=False,
required=False,
password=True,
),
DropdownInput(
name="provider",
@ -49,14 +57,14 @@ class ChatLiteLLMModelComponent(LCModelComponent):
display_name="Temperature",
advanced=False,
required=False,
default=0.7,
value=0.7,
),
DictInput(
name="model_kwargs",
display_name="Model kwargs",
advanced=True,
required=False,
default={},
value={},
),
FloatInput(
name="top_p",
@ -77,13 +85,13 @@ class ChatLiteLLMModelComponent(LCModelComponent):
required=False,
info="Number of chat completions to generate for each prompt. "
"Note that the API may not return the full n completions if duplicates are generated.",
default=1,
value=1,
),
IntInput(
name="max_tokens",
display_name="Max tokens",
advanced=False,
default=256,
value=256,
info="The maximum number of tokens to generate for each chat completion.",
),
IntInput(
@ -91,14 +99,14 @@ class ChatLiteLLMModelComponent(LCModelComponent):
display_name="Max retries",
advanced=True,
required=False,
default=6,
value=6,
),
BoolInput(
name="verbose",
display_name="Verbose",
advanced=True,
required=False,
default=False,
value=False,
),
BoolInput(
name="stream",
@ -179,3 +187,4 @@ class ChatLiteLLMModelComponent(LCModelComponent):
)
return output
return output

View file

@ -1,9 +1,8 @@
from langchain_google_vertexai import ChatVertexAI
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import BaseLanguageModel, Text
from langflow.io import BoolInput, FloatInput, IntInput, MessageInput, Output, StrInput
from langflow.io import BoolInput, FileInput, FloatInput, IntInput, MessageInput, MultilineInput, Output, StrInput
class ChatVertexAIComponent(LCModelComponent):
@ -13,19 +12,18 @@ class ChatVertexAIComponent(LCModelComponent):
inputs = [
MessageInput(name="input_value", display_name="Input"),
StrInput(
FileInput(
name="credentials",
display_name="Credentials",
info="Path to the JSON file containing the credentials.",
file_types=[".json"],
file_types=["json"],
advanced=True,
),
StrInput(name="project", display_name="Project", info="The project ID."),
StrInput(
MultilineInput(
name="examples",
display_name="Examples",
info="Examples to pass to the model.",
multiline=True,
advanced=True,
),
StrInput(name="location", display_name="Location", value="us-central1", advanced=True),

View file

@ -2,10 +2,9 @@ from typing import List
from langchain_community.vectorstores import Cassandra
from langchain_core.retrievers import BaseRetriever
from langflow.custom import Component
from langflow.helpers.data import docs_to_data
from langflow.io import BoolInput, DropdownInput, HandleInput, IntInput, Output, StrInput
from langflow.io import BoolInput, DropdownInput, HandleInput, IntInput, Output, SecretStrInput, StrInput
from langflow.schema import Data
@ -16,11 +15,10 @@ class CassandraVectorStoreComponent(Component):
icon = "Cassandra"
inputs = [
StrInput(
SecretStrInput(
name="token",
display_name="Token",
info="Authentication token for accessing Cassandra on Astra DB.",
password=True,
required=True,
),
StrInput(name="database_id", display_name="Database ID", info="The Astra database ID.", required=True),

View file

@ -3,10 +3,9 @@ from typing import List
from langchain_community.vectorstores import CouchbaseVectorStore
from langchain_core.retrievers import BaseRetriever
from langflow.custom import Component
from langflow.helpers.data import docs_to_data
from langflow.io import BoolInput, HandleInput, IntInput, Output, StrInput
from langflow.io import BoolInput, HandleInput, IntInput, Output, SecretStrInput, StrInput
from langflow.schema import Data
@ -19,7 +18,7 @@ class CouchbaseVectorStoreComponent(Component):
inputs = [
StrInput(name="couchbase_connection_string", display_name="Couchbase Cluster connection string", required=True),
StrInput(name="couchbase_username", display_name="Couchbase username", required=True),
StrInput(name="couchbase_password", display_name="Couchbase password", password=True, required=True),
SecretStrInput(name="couchbase_password", display_name="Couchbase password", required=True),
StrInput(name="bucket_name", display_name="Bucket Name", required=True),
StrInput(name="scope_name", display_name="Scope Name", required=True),
StrInput(name="collection_name", display_name="Collection Name", required=True),

View file

@ -2,10 +2,9 @@ from typing import List
from langchain_core.retrievers import BaseRetriever
from langchain_pinecone import Pinecone
from langflow.custom import Component
from langflow.helpers.data import docs_to_data
from langflow.io import BoolInput, DropdownInput, HandleInput, IntInput, Output, StrInput
from langflow.io import BoolInput, DropdownInput, HandleInput, IntInput, Output, SecretStrInput, StrInput
from langflow.schema import Data
@ -25,7 +24,7 @@ class PineconeVectorStoreComponent(Component):
value="Cosine",
advanced=True,
),
StrInput(name="pinecone_api_key", display_name="Pinecone API Key", password=True, required=True),
SecretStrInput(name="pinecone_api_key", display_name="Pinecone API Key", required=True),
HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]),
StrInput(
name="text_key",

View file

@ -2,10 +2,9 @@ from typing import List
from langchain_community.vectorstores import Qdrant
from langchain_core.retrievers import BaseRetriever
from langflow.custom import Component
from langflow.helpers.data import docs_to_data
from langflow.io import BoolInput, DropdownInput, HandleInput, IntInput, Output, StrInput
from langflow.io import BoolInput, DropdownInput, HandleInput, IntInput, Output, SecretStrInput, StrInput
from langflow.schema import Data
@ -20,7 +19,7 @@ class QdrantVectorStoreComponent(Component):
StrInput(name="host", display_name="Host", value="localhost", advanced=True),
IntInput(name="port", display_name="Port", value=6333, advanced=True),
IntInput(name="grpc_port", display_name="gRPC Port", value=6334, advanced=True),
StrInput(name="api_key", display_name="API Key", password=True, advanced=True),
SecretStrInput(name="api_key", display_name="API Key", advanced=True),
StrInput(name="prefix", display_name="Prefix", advanced=True),
IntInput(name="timeout", display_name="Timeout", advanced=True),
StrInput(name="path", display_name="Path", advanced=True),

View file

@ -3,10 +3,9 @@ from typing import List
from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores import Vectara
from langchain_core.retrievers import BaseRetriever
from langflow.custom import Component
from langflow.helpers.data import docs_to_data
from langflow.io import BoolInput, HandleInput, IntInput, Output, StrInput
from langflow.io import BoolInput, HandleInput, IntInput, Output, SecretStrInput, StrInput
from langflow.schema import Data
@ -19,7 +18,7 @@ class VectaraVectorStoreComponent(Component):
inputs = [
StrInput(name="vectara_customer_id", display_name="Vectara Customer ID", required=True),
StrInput(name="vectara_corpus_id", display_name="Vectara Corpus ID", required=True),
StrInput(name="vectara_api_key", display_name="Vectara API Key", password=True, required=True),
SecretStrInput(name="vectara_api_key", display_name="Vectara API Key", required=True),
HandleInput(
name="vector_store_inputs",
display_name="Vector Store Inputs",

View file

@ -3,10 +3,9 @@ from typing import List
import weaviate
from langchain_community.vectorstores import Weaviate
from langchain_core.retrievers import BaseRetriever
from langflow.custom import Component
from langflow.helpers.data import docs_to_data
from langflow.io import BoolInput, HandleInput, IntInput, Output, StrInput
from langflow.io import BoolInput, HandleInput, IntInput, Output, SecretStrInput, StrInput
from langflow.schema import Data
@ -18,7 +17,7 @@ class WeaviateVectorStoreComponent(Component):
inputs = [
StrInput(name="url", display_name="Weaviate URL", value="http://localhost:8080", required=True),
StrInput(name="api_key", display_name="API Key", password=True, required=False),
SecretStrInput(name="api_key", display_name="API Key", required=False),
StrInput(name="index_name", display_name="Index Name", required=True),
StrInput(name="text_key", display_name="Text Key", value="text", advanced=True),
HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]),

View file

@ -448,7 +448,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, Output, TextInput\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n TextInput(\n name=\"input_value\",\n display_name=\"Text\",\n multiline=True,\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n TextInput(\n name=\"sender_name\",\n type=str,\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n TextInput(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, MultilineInput, Output, TextInput\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n TextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n TextInput(name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n"
},
"input_value": {
"advanced": false,

View file

@ -326,7 +326,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, Output, TextInput\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n TextInput(\n name=\"input_value\",\n display_name=\"Text\",\n multiline=True,\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n TextInput(\n name=\"sender_name\",\n type=str,\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n TextInput(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, MultilineInput, Output, TextInput\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n TextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n TextInput(name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n"
},
"input_value": {
"advanced": false,

View file

@ -502,7 +502,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, Output, TextInput\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n TextInput(\n name=\"input_value\",\n display_name=\"Text\",\n multiline=True,\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n TextInput(\n name=\"sender_name\",\n type=str,\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n TextInput(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, MultilineInput, Output, TextInput\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n TextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n TextInput(name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n"
},
"files": {
"advanced": true,

View file

@ -363,7 +363,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, Output, TextInput\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n TextInput(\n name=\"input_value\",\n display_name=\"Text\",\n multiline=True,\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n TextInput(\n name=\"sender_name\",\n type=str,\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n TextInput(\n name=\"session_id\", type=str, display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, FileInput, MultilineInput, Output, TextInput\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n TextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n TextInput(name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and isinstance(message.text, str):\n self.store_message(message)\n self.message.value = message\n\n self.status = message\n return message\n"
},
"input_value": {
"advanced": false,