From 908c141d972118286d682124ce53ced16a4140ea Mon Sep 17 00:00:00 2001 From: anovazzi1 Date: Tue, 15 Oct 2024 16:09:12 -0300 Subject: [PATCH] feature: get messages from messages table for the playground (#3874) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: Update MessageBase text attribute based on isinstance check. * feat: Add update_message function to update a message in the database. * refactor(chat): Update imports and remove unnecessary config method in ChatComponent. * refactor: Add stream_message method to ChatComponent. * refactor: Update method call in ChatOutput component. * feat: Add callback function to custom component and update build_results signature. * feat: Add callback parameter to instantiate_class function. * feat(graph): Add callback functions for sync and async operations. * feat: Add callback function support to vertex build process. * feat: Add handling for added message in InterfaceVertex class. * feat: Add callback support to Graph methods. * feat(chat): Add callback function to build_vertices function. * refactor: Simplify update_message function and use session_scope for session management. * fix: Call set_callback method if available on custom component. * refactor(chat): Update chat message chunk handling and ID conversion. * feat: Add null check before setting cache in build_vertex_stream function. * refactor: Fix send_event_wrapper function and add callback parameter to _build_vertex function. * refactor: Simplify conditional statement and import order in ChatOutput. * [autofix.ci] apply automated fixes * refactor: move log method to Component class. * refactor: Simplify CallbackFunction definition. * feat: Initialize _current_output attribute in Component class. * feat: store current output name in custom component during processing. * feat: Add current output and component ID to log data. * fix: Add condition to check current output before invoking callback. 
* refactor: Update callback to log_callback in graph methods. * feat: Add test for callback graph execution with log messages. * update projects * fix(chat.py): fix condition to check if message text is a string before updating message text in the database * refactor(ChatOutput.py): update ChatOutput class to correctly store and assign the message value to ensure consistency and avoid potential bugs * refactor(chat.py): update return type of store_message method to return a single Message object instead of a list of Messages refactor(chat.py): update logic to correctly handle updating and returning a single stored message object instead of a list of messages * update starter projects * refactor(component.py): update type hint for name parameter in log method to be more explicit * feat: Add EventManager class for managing events and event registration * refactor: Update log_callback to event_manager in custom component classes * refactor(component.py): rename _log_callback to _event_manager and update method call to on_log for better clarity and consistency * refactor(chat.py): rename _log_callback method to _event_manager.on_token for clarity and consistency in method naming * refactor: Rename log_callback to event_manager for clarity and consistency * refactor: Update Vertex class to use EventManager instead of log_callback for better clarity and consistency * refactor: update build_flow to use EventManager * refactor: Update EventManager class to use Protocol for event callbacks * if event_type is not passed, it uses the default send_event * Add method to register event functions in EventManager - Introduced `register_event_function` method to allow passing custom event functions. - Updated `noop` method to accept `event_type` parameter. - Adjusted `__getattr__` to return `EventCallback` type. 
* update test_callback_graph * Add unit tests for EventManager in test_event_manager.py - Added tests for event registration, including default event type, empty string names, and specific event types. - Added tests for custom event functions and unregistered event access. - Added tests for event sending, including JSON formatting, empty data, and large payloads. - Added tests for handling JSON serialization errors and the noop function. * feat: Add callback function support to vertex build process. * feat: Add callback support to Graph methods. * feat(chat): Add callback function to build_vertices function. * [autofix.ci] apply automated fixes * refactor: Update callback to log_callback in graph methods. * fetching data from messages and builds at the same time, need to remove duplicates * refactor: Sort chat history by timestamp in ChatView component * fix: update serialization and improve error handling (#3516) * feat(utils): add support for V1BaseModel in serialize_field Add support for V1BaseModel instances in the serialize_field function by checking for a "to_json" method. If the method is not present, return the attribute values as a dictionary. * refactor: Update field serializer function and error handling in build_flow function * remove use memo to prevent bugs * feat: add updateMessagePartial method to MessagesStoreType * feat: update message partially in MessagesStoreType This commit adds the `updateMessagePartial` method to the `MessagesStoreType` in `messagesStore.ts`. This method allows updating a specific message by merging the changes with the existing message object. 
* feat: add log callback for start message in ChatComponent * feat: update log_callback name * feat: add log_callback for message in ChatComponent that are not streaming * refactor: remove console.log statement in buildFlowVertices function * refactor: store message in ChatInput after updating flow_id This commit refactors the `ChatInput` component by moving the logic to store the message after updating the `flow_id` property. This ensures that the message is properly stored in the correct flow. The previous implementation had the logic to store the message before updating the `flow_id`, which could lead to incorrect storage of messages. This change improves the reliability and accuracy of message storage in the `ChatInput` component. * refactor: move message storage logic in ChatInput after updating flow_id * refactor: update ChatComponent to use stored_message.id instead of self.graph.flow_id Update the `ChatComponent` class in `chat.py` to use the `stored_message.id` property instead of `self.graph.flow_id` when logging a message. This ensures that the correct message ID is used for logging purposes. The previous implementation used the flow ID, which could lead to incorrect logging. This change improves the accuracy of message logging in the `ChatComponent`. * refactor: remove unused code and console.log statements * raw: temp serializer fix * streaming working but the message comes in one shot * refactor: optimize message update in useMessagesStore Improve the efficiency of updating messages in the `useMessagesStore` function of `messagesStore.ts`. Instead of iterating through the entire message list, this refactor searches for the message to update by iterating backwards from the end. This approach allows for faster message retrieval and update. The code has been modified to use a for loop and break out of the loop once the message is found. This change enhances the performance of the message update process. 
* Refactor `serialize_flow_id` method to correctly handle UUID serialization in `message.py` * Refactor `send_event` method to use `jsonable_encoder` for data serialization * refactor: optimize message update in useMessagesStore * streaming working with timeout * refactor: update buildUtils.ts to use data instead of data.data in addMessage function * version with reactState for chatHistory * refactor: update on_message method in ChatComponent * refactor: update on_message method in ChatComponent * refactor: Remove unused dependency in package-lock.json * Refactor chatView component and add hiddenSession prop * Refactor chatView component and update hiddenSessions prop * Refactor chatView component to use visibleSessions prop instead of hiddenSessions * Refactor IOModal component to remove redundant code * Refactor chatView component to include focusChat prop * Refactor chatView component to include focusChat prop and trigger focus on chat when new session is set * Refactor IOModal component to update visible sessions when new session is added * feat: Add session parameter to buildFlowVertices function * feat: Add someFlowTemplateFields function Add the someFlowTemplateFields function to the reactflowUtils module. This function checks if any of the nodes in the provided array have template fields that pass a given validation function. * feat: Add session parameter to buildFlowVertices function * feat: Add session parameter to buildFlowVertices function * update Session logic on ioModal * Refactor ChatView component: Remove unused eraser button The eraser button in the ChatView component was removed as it was not being used and served no purpose. This change improves code cleanliness and removes unnecessary code. 
* Refactor Vertex class: Inject session_id if provided in inputs * Refactor build_flow function: Set default session if inputs are empty * Refactor InputValueRequest schema: Add session parameter * Refactor IOModal component: Update session logic * Refactor buildFlowVertices function: Update input handling * Refactor MessagesStoreType in zustand/messages/index.ts: Remove unused columns property and setColumns method * Refactor MessagesStoreType: Remove unused columns property and setColumns method * Refactor SessionView component: Update columns extraction logic * Refactor ChatView component: Remove unused variables * Refactor useGetMessagesQuery: Remove unused setColumns method * Refactor RenderIcons component: Set default value for filteredShortcut prop to prevent bug * create edit message component for chat view * Refactor useUpdateMessage: Add refetch option to trigger query refetch * Refactor IOModal component: Remove unused variables and update useGetMessagesQuery * Refactor ChatView component: Add session ID to message object * update chat message to handle message edit * update types * fix: Update API call to send entire message object * Refactor EditMessageField component: Add timeout to onBlur event * Refactor EditMessageField component: Update layout of edit message field * create migration * add fields to data table * feat: Add "edit" flag to message_dict in update_message API endpoint * Refactor EditMessageField component: Improve onBlur event handling and add button click flag * Refactor code to include "edit" flag in message types * feat: Add EditMessageButton component for editing chat messages * Refactor ChatMessage component: Add EditMessageButton and improve layout * fix: Add refetch query for current flow messages not all flows * Refactor ChatMessage component: Add ShadTooltip for EditMessageButton * add info into edit message field * fix: migrate * fix running chat input directly from the flow * [autofix.ci] apply automated fixes * fix edit 
flag * Refactor IOModal component to generate a unique session ID based on the current date and time * [autofix.ci] apply automated fixes * Refactor IOModal component to improve session management and interaction * [autofix.ci] apply automated fixes * Refactor sessionSelector component to improve session management and interaction * chore: Refactor sessionSelector component to improve session management and interaction * [autofix.ci] apply automated fixes * create mutation to handle session rename * refactor: Rename useUpdateSession to useUpdateSessionName for clarity * [autofix.ci] apply automated fixes * Refactor sessionSelector component for improved session management and interaction * Refactor sessionSelector component to update visible session on session name change * [autofix.ci] apply automated fixes * add message related events back * chore: Add console logs for debugging in buildFlowVertices function * Refactor IOModal component to update tab trigger label from "Memories" to "Chat" * improve edit name feature * Refactor IOModal component button label to "New Chat" * Refactor sessionSelector component to improve session management and interaction * Refactor IOModal component to remove unused code and improve session management * fix typing error * fix run chat input on component level * prevent toggle visibility on session menu * fix bug on rename session while in table view mode * chore: Update setSelectedView prop type in sessionSelector component * add first test version not working yet * fix bug for renaming and deleting session * refactor: Update sessionSelector component to handle session changes * improve test * fix rename session multiple session bugs * change visible session from array to string * chore: Update editMessageField component to include margin-right for text span * [autofix.ci] apply automated fixes * Update down_revision in Alembic migration script * Refactor IOModal component to simplify session visibility handling * Fix comparison 
operator for filtering error messages in memory.py * Refactor ChatInput to conditionally store and update messages * Refactor JSON formatting for improved readability in starter projects * Add type casting for message_text and import cast from typing module * Refactor input handling to use direct dictionary access for 'session' and 'input_value' keys * Allow `update_message` to accept `str` type for `message_id` parameter * ⬆️ (pyproject.toml): upgrade duckduckgo-search dependency to version 6.3.1 for bug fixes or new features 🔧 (duckduckgo.spec.ts): refactor test to handle multiple possible outcomes when waiting for selectors and improve readability * Refactor test file: generalBugs-shard-0.spec.ts * Refactor test file: freeze.spec.ts * Refactor test files: update element selectors and actions * Refactor test file: chatInputOutput.spec.ts * [autofix.ci] apply automated fixes * Refactor chatMessage component to handle different types of children content on code modal * [autofix.ci] apply automated fixes --------- Co-authored-by: Gabriel Luiz Freitas Almeida Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: italojohnny Co-authored-by: cristhianzl --- pyproject.toml | 2 +- ...a8e_add_error_and_edit_flags_to_message.py | 49 ++ src/backend/base/langflow/api/v1/chat.py | 3 + src/backend/base/langflow/api/v1/monitor.py | 1 + src/backend/base/langflow/api/v1/schemas.py | 4 + .../langflow/base/astra_assistants/util.py | 2 +- src/backend/base/langflow/base/io/chat.py | 96 +-- .../langflow/components/inputs/ChatInput.py | 15 +- .../langflow/components/outputs/ChatOutput.py | 14 +- .../base/langflow/events/event_manager.py | 4 +- .../base/langflow/graph/vertex/base.py | 15 +- .../starter_projects/Agent Flow.json | 224 ++++-- .../Basic Prompting (Hello, World).json | 135 +++- .../starter_projects/Blog Writer.json | 199 ++++-- .../starter_projects/Complex Agent.json | 650 +++++++++++++----- .../starter_projects/Document QA.json 
| 193 ++++-- .../starter_projects/Hierarchical Agent.json | 402 ++++++++--- .../starter_projects/Memory Chatbot.json | 195 ++++-- .../starter_projects/Sequential Agent.json | 408 ++++++++--- .../Travel Planning Agents.json | 407 ++++++++--- .../starter_projects/Vector Store RAG.json | 449 +++++++++--- src/backend/base/langflow/memory.py | 4 +- src/backend/base/langflow/schema/message.py | 12 +- .../services/database/models/message/crud.py | 2 +- .../services/database/models/message/model.py | 4 + .../components/renderIconComponent/index.tsx | 2 +- .../API/queries/messages/use-get-messages.ts | 1 - .../messages/use-put-update-messages.ts | 40 +- .../queries/messages/use-rename-session.ts | 42 ++ .../components/sessionSelector/index.tsx | 214 ++++++ .../IOModal/components/SessionView/index.tsx | 40 +- .../components/uploadFileButton/index.tsx | 2 +- .../components/editMessageButton/index.tsx | 13 + .../components/editMessageField/index.tsx | 75 ++ .../components/fileCardWrapper/index.tsx | 26 +- .../components/chatView/chatMessage/index.tsx | 309 ++++++--- .../IOModal/components/chatView/index.tsx | 116 +--- src/frontend/src/modals/IOModal/index.tsx | 175 +++-- src/frontend/src/stores/flowStore.ts | 3 + src/frontend/src/stores/messagesStore.ts | 18 +- src/frontend/src/types/chat/index.ts | 8 +- src/frontend/src/types/components/index.ts | 2 + src/frontend/src/types/messages/index.ts | 4 +- src/frontend/src/types/zustand/flow/index.ts | 2 + .../src/types/zustand/messages/index.ts | 3 +- src/frontend/src/utils/buildUtils.ts | 32 +- src/frontend/src/utils/reactflowUtils.ts | 11 + src/frontend/src/utils/utils.ts | 12 + .../tests/core/features/freeze.spec.ts | 8 +- .../tests/core/features/playground.spec.ts | 262 +++++++ .../core/integrations/Basic Prompting.spec.ts | 3 +- .../core/integrations/Document QA.spec.ts | 4 +- .../core/integrations/Memory Chatbot.spec.ts | 3 +- .../core/integrations/Vector Store.spec.ts | 8 +- .../tests/core/unit/chatInputOutput.spec.ts | 2 +- 
.../extended/integrations/duckduckgo.spec.ts | 34 +- .../regression/generalBugs-shard-0.spec.ts | 104 --- uv.lock | 26 +- 58 files changed, 3782 insertions(+), 1311 deletions(-) create mode 100644 src/backend/base/langflow/alembic/versions/eb5e72293a8e_add_error_and_edit_flags_to_message.py create mode 100644 src/frontend/src/controllers/API/queries/messages/use-rename-session.ts create mode 100644 src/frontend/src/modals/IOModal/components/IOFieldView/components/sessionSelector/index.tsx create mode 100644 src/frontend/src/modals/IOModal/components/chatView/chatMessage/components/editMessageButton/index.tsx create mode 100644 src/frontend/src/modals/IOModal/components/chatView/chatMessage/components/editMessageField/index.tsx create mode 100644 src/frontend/tests/core/features/playground.spec.ts delete mode 100644 src/frontend/tests/extended/regression/generalBugs-shard-0.spec.ts diff --git a/pyproject.toml b/pyproject.toml index 1495a6313..fb2a11c60 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -105,7 +105,7 @@ dependencies = [ "jq>=1.8.0", "pydantic-settings==2.4.0", "ragstack-ai-knowledge-store>=0.2.1", - "duckduckgo-search>=6.3.0", + "duckduckgo-search>=6.3.1", "langchain-elasticsearch>=0.2.0", "opensearch-py>=2.7.1", ] diff --git a/src/backend/base/langflow/alembic/versions/eb5e72293a8e_add_error_and_edit_flags_to_message.py b/src/backend/base/langflow/alembic/versions/eb5e72293a8e_add_error_and_edit_flags_to_message.py new file mode 100644 index 000000000..1939cd049 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/eb5e72293a8e_add_error_and_edit_flags_to_message.py @@ -0,0 +1,49 @@ +"""Add error and edit flags to message + +Revision ID: eb5e72293a8e +Revises: 5ace73a7f223 +Create Date: 2024-09-19 16:18:50.828648 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. 
+revision: str = "eb5e72293a8e" +down_revision: Union[str, None] = "5ace73a7f223" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() # noqa + column_names = [column["name"] for column in inspector.get_columns("message")] + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("message", schema=None) as batch_op: + if "error" not in column_names: + batch_op.add_column(sa.Column("error", sa.Boolean(), nullable=False, server_default=sa.false())) + if "edit" not in column_names: + batch_op.add_column(sa.Column("edit", sa.Boolean(), nullable=False, server_default=sa.false())) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() # noqa + column_names = [column["name"] for column in inspector.get_columns("message")] + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("message", schema=None) as batch_op: + if "edit" in column_names: + batch_op.drop_column("edit") + if "error" in column_names: + batch_op.drop_column("error") + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py index 2a3a38739..51bc435d6 100644 --- a/src/backend/base/langflow/api/v1/chat.py +++ b/src/backend/base/langflow/api/v1/chat.py @@ -155,6 +155,9 @@ async def build_flow( telemetry_service: TelemetryService = Depends(get_telemetry_service), session=Depends(get_session), ): + if not inputs: + inputs = InputValueRequest(session=str(flow_id)) + async def build_graph_and_get_order() -> tuple[list[str], list[str], Graph]: start_time = time.perf_counter() components_count = None diff --git a/src/backend/base/langflow/api/v1/monitor.py b/src/backend/base/langflow/api/v1/monitor.py index 9f6d0742c..ac3fabf3e 100644 --- a/src/backend/base/langflow/api/v1/monitor.py +++ b/src/backend/base/langflow/api/v1/monitor.py @@ -99,6 +99,7 @@ async def update_message( try: message_dict = message.model_dump(exclude_unset=True, exclude_none=True) + message_dict["edit"] = True db_message.sqlmodel_update(message_dict) session.add(db_message) session.commit() diff --git a/src/backend/base/langflow/api/v1/schemas.py b/src/backend/base/langflow/api/v1/schemas.py index af931c8a2..f986765dd 100644 --- a/src/backend/base/langflow/api/v1/schemas.py +++ b/src/backend/base/langflow/api/v1/schemas.py @@ -297,6 +297,7 @@ class VerticesBuiltResponse(BaseModel): class InputValueRequest(BaseModel): components: list[str] | None = [] input_value: str | None = None + session: str | None = None type: InputType | None = Field( "any", description="Defines on which components the input value should be applied. 
" @@ -310,9 +311,12 @@ class InputValueRequest(BaseModel): { "components": ["components_id", "Component Name"], "input_value": "input_value", + "session": "session_id", }, {"components": ["Component Name"], "input_value": "input_value"}, {"input_value": "input_value"}, + {"components": ["Component Name"], "input_value": "input_value", "session": "session_id"}, + {"input_value": "input_value", "session": "session_id"}, {"type": "chat", "input_value": "input_value"}, {"type": "json", "input_value": '{"key": "value"}'}, ] diff --git a/src/backend/base/langflow/base/astra_assistants/util.py b/src/backend/base/langflow/base/astra_assistants/util.py index 784880813..7f69b8a1b 100644 --- a/src/backend/base/langflow/base/astra_assistants/util.py +++ b/src/backend/base/langflow/base/astra_assistants/util.py @@ -28,7 +28,7 @@ response = requests.get(url) data = json.loads(response.text) # Extract the model names into a Python list -litellm_model_names = [model for model, _ in data.items() if model != "sample_spec"] +litellm_model_names = [model for model in data if model != "sample_spec"] # To store the class names that extend ToolInterface diff --git a/src/backend/base/langflow/base/io/chat.py b/src/backend/base/langflow/base/io/chat.py index 4c94dbcae..ce21413bf 100644 --- a/src/backend/base/langflow/base/io/chat.py +++ b/src/backend/base/langflow/base/io/chat.py @@ -1,4 +1,5 @@ from collections.abc import AsyncIterator, Iterator +from typing import cast from langflow.custom import Component from langflow.memory import store_message @@ -12,43 +13,52 @@ class ChatComponent(Component): display_name = "Chat Component" description = "Use as base for chat components." 
- # Keep this method for backward compatibility - def store_message( - self, - message: Message, - ) -> Message: - messages = store_message( - message, - flow_id=self.graph.flow_id, - ) - if len(messages) > 1: + def store_message(self, message: Message) -> Message: + messages = store_message(message, flow_id=self.graph.flow_id) + if len(messages) != 1: msg = "Only one message can be stored at a time." raise ValueError(msg) + stored_message = messages[0] - if ( - hasattr(self, "_event_manager") - and self._event_manager - and stored_message.id - and not isinstance(message.text, str) - ): + self._send_message_event(stored_message) + + if self._should_stream_message(stored_message, message): complete_message = self._stream_message(message, stored_message.id) - message_table = update_message(message_id=stored_message.id, message={"text": complete_message}) - stored_message = Message(**message_table.model_dump()) - self.vertex._added_message = stored_message + stored_message = self._update_stored_message(stored_message.id, complete_message) + self.status = stored_message return stored_message + def _send_message_event(self, message: Message): + if hasattr(self, "_event_manager") and self._event_manager: + self._event_manager.on_message(data=message.data) + + def _should_stream_message(self, stored_message: Message, original_message: Message) -> bool: + return bool( + hasattr(self, "_event_manager") + and self._event_manager + and stored_message.id + and not isinstance(original_message.text, str) + ) + + def _update_stored_message(self, message_id: str, complete_message: str) -> Message: + message_table = update_message(message_id=message_id, message={"text": complete_message}) + updated_message = Message(**message_table.model_dump()) + self.vertex._added_message = updated_message + return updated_message + def _process_chunk(self, chunk: str, complete_message: str, message: Message, message_id: str) -> str: complete_message += chunk - data = { - "text": 
complete_message, - "chunk": chunk, - "sender": message.sender, - "sender_name": message.sender_name, - "id": str(message_id), - } if self._event_manager: - self._event_manager.on_token(data=data) + self._event_manager.on_token( + data={ + "text": complete_message, + "chunk": chunk, + "sender": message.sender, + "sender_name": message.sender_name, + "id": str(message_id), + } + ) return complete_message async def _handle_async_iterator(self, iterator: AsyncIterator, message: Message, message_id: str) -> str: @@ -69,7 +79,6 @@ class ChatComponent(Component): complete_message = "" for chunk in iterator: complete_message = self._process_chunk(chunk.content, complete_message, message, message_id) - return complete_message def build_with_data( @@ -80,22 +89,25 @@ class ChatComponent(Component): input_value: str | Data | Message | None = None, files: list[str] | None = None, session_id: str | None = None, - return_message: bool | None = False, - ) -> Message: - if isinstance(input_value, Data): - # Update the data of the record - message = Message.from_data(input_value) - else: - message = Message( - text=input_value, sender=sender, sender_name=sender_name, files=files, session_id=session_id - ) + return_message: bool = False, + ) -> str | Message: + message = self._create_message(input_value, sender, sender_name, files, session_id) message_text = message.text if not return_message else message self.status = message_text if session_id and isinstance(message, Message) and isinstance(message.text, str): - messages = store_message( - message, - flow_id=self.graph.flow_id, - ) + messages = store_message(message, flow_id=self.graph.flow_id) self.status = messages - return message_text # type: ignore[return-value] + self._send_messages_events(messages) + + return cast(str | Message, message_text) + + def _create_message(self, input_value, sender, sender_name, files, session_id) -> Message: + if isinstance(input_value, Data): + return Message.from_data(input_value) + return 
Message(text=input_value, sender=sender, sender_name=sender_name, files=files, session_id=session_id) + + def _send_messages_events(self, messages): + if hasattr(self, "_event_manager") and self._event_manager: + for stored_message in messages: + self._event_manager.on_message(data=stored_message.data) diff --git a/src/backend/base/langflow/components/inputs/ChatInput.py b/src/backend/base/langflow/components/inputs/ChatInput.py index 54b16681a..13f0c3000 100644 --- a/src/backend/base/langflow/components/inputs/ChatInput.py +++ b/src/backend/base/langflow/components/inputs/ChatInput.py @@ -2,7 +2,6 @@ from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES from langflow.base.io.chat import ChatComponent from langflow.inputs import BoolInput from langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output -from langflow.memory import store_message from langflow.schema.message import Message from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER @@ -69,18 +68,12 @@ class ChatInput(ChatComponent): session_id=self.session_id, files=self.files, ) - - if ( - self.session_id - and isinstance(message, Message) - and isinstance(message.text, str) - and self.should_store_message - ): - store_message( + if self.session_id and isinstance(message, Message) and self.should_store_message: + stored_message = self.store_message( message, - flow_id=self.graph.flow_id, ) - self.message.value = message + self.message.value = stored_message + message = stored_message self.status = message return message diff --git a/src/backend/base/langflow/components/outputs/ChatOutput.py b/src/backend/base/langflow/components/outputs/ChatOutput.py index a0a9b8eeb..e8e5e51c0 100644 --- a/src/backend/base/langflow/components/outputs/ChatOutput.py +++ b/src/backend/base/langflow/components/outputs/ChatOutput.py @@ -1,7 +1,6 @@ from langflow.base.io.chat import ChatComponent from langflow.inputs import BoolInput from 
langflow.io import DropdownInput, MessageTextInput, Output -from langflow.memory import store_message from langflow.schema.message import Message from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER @@ -65,17 +64,12 @@ class ChatOutput(ChatComponent): sender_name=self.sender_name, session_id=self.session_id, ) - if ( - self.session_id - and isinstance(message, Message) - and isinstance(message.text, str) - and self.should_store_message - ): - store_message( + if self.session_id and isinstance(message, Message) and self.should_store_message: + stored_message = self.store_message( message, - flow_id=self.graph.flow_id, ) - self.message.value = message + self.message.value = stored_message + message = stored_message self.status = message return message diff --git a/src/backend/base/langflow/events/event_manager.py b/src/backend/base/langflow/events/event_manager.py index 501f73146..6390ca45d 100644 --- a/src/backend/base/langflow/events/event_manager.py +++ b/src/backend/base/langflow/events/event_manager.py @@ -5,6 +5,7 @@ import time import uuid from functools import partial +from fastapi.encoders import jsonable_encoder from typing_extensions import Protocol from langflow.schema.log import LoggableType @@ -52,7 +53,8 @@ class EventManager: self.events[name] = _callback def send_event(self, *, event_type: str, data: LoggableType): - json_data = {"event": event_type, "data": data} + jsonable_data = jsonable_encoder(data) + json_data = {"event": event_type, "data": jsonable_data} event_id = uuid.uuid4() str_data = json.dumps(json_data) + "\n\n" self.queue.put_nowait((event_id, str_data.encode("utf-8"), time.time())) diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py index e9e6b839f..75dee5752 100644 --- a/src/backend/base/langflow/graph/vertex/base.py +++ b/src/backend/base/langflow/graph/vertex/base.py @@ -793,11 +793,20 @@ class Vertex: # and we are just getting 
the result for the requester return await self.get_requester_result(requester) self._reset() - + # inject session_id if it is not None + if inputs is not None and "session" in inputs and inputs["session"] is not None and self.has_session_id: + session_id_value = self.get_value_from_template_dict("session_id") + if session_id_value == "": + self.update_raw_params({"session_id": inputs["session"]}, overwrite=True) if self._is_chat_input() and (inputs or files): chat_input = {} - if inputs: - chat_input.update({"input_value": inputs.get(INPUT_FIELD_NAME, "")}) + if ( + inputs + and isinstance(inputs, dict) + and "input_value" in inputs + and inputs["input_value"] is not None + ): + chat_input.update({"input_value": inputs[INPUT_FIELD_NAME]}) if files: chat_input.update({"files": files}) diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Agent Flow.json b/src/backend/base/langflow/initial_setup/starter_projects/Agent Flow.json index 3514d2db0..98f53161d 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Agent Flow.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Agent Flow.json @@ -8,12 +8,16 @@ "dataType": "ToolCallingAgent", "id": "ToolCallingAgent-mf0BN", "name": "response", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-Ag9YG", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -30,12 +34,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-1ioeW", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "ToolCallingAgent-mf0BN", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -52,12 +60,17 @@ "dataType": "CalculatorTool", "id": "CalculatorTool-Nb4P5", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, 
"targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-mf0BN", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": "other" } }, @@ -74,12 +87,16 @@ "dataType": "ChatInput", "id": "ChatInput-X3ARP", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ToolCallingAgent-mf0BN", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -95,12 +112,17 @@ "dataType": "PythonREPLTool", "id": "PythonREPLTool-i922a", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-mf0BN", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": "other" } }, @@ -116,7 +138,9 @@ "data": { "id": "ChatInput-X3ARP", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -144,7 +168,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -167,7 +193,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n 
value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n 
display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "_input_type": "FileInput", @@ -217,7 +243,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": 
true, @@ -239,7 +267,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -254,7 +285,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -273,7 +306,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -325,7 +360,9 @@ "data": { "id": "ChatOutput-Ag9YG", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -353,7 +390,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -376,7 +415,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n 
advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n 
name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "_input_type": "MessageTextInput", @@ -384,7 +423,9 @@ "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -403,7 +444,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -424,7 +467,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -439,7 +485,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -458,7 +506,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -512,7 +562,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-1ioeW", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -545,9 +598,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -567,7 +626,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -580,7 +641,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The 
OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -615,7 +678,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -731,7 +796,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -796,7 +863,9 @@ "display_name": "System Message", "dynamic": false, "info": "System message to pass to the model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "system_message", @@ -848,7 +917,10 @@ "data": { "id": "ToolCallingAgent-mf0BN", "node": { - "base_classes": ["AgentExecutor", "Message"], + "base_classes": [ + "AgentExecutor", + "Message" + ], "beta": true, "conditional_paths": [], "custom_fields": {}, @@ -879,7 +951,9 @@ "method": "build_agent", "name": "agent", "selected": "AgentExecutor", - "types": ["AgentExecutor"], + "types": [ + "AgentExecutor" + ], "value": "__UNDEFINED__" }, { @@ -888,7 +962,9 @@ "method": "message_response", "name": "response", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -901,7 +977,9 @@ "display_name": "Chat History", "dynamic": false, "info": "", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": true, "name": "chat_history", "placeholder": "", @@ -953,7 +1031,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -972,7 +1052,9 @@ "display_name": "Language Model", "dynamic": false, "info": "", - "input_types": 
["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1005,7 +1087,9 @@ "display_name": "System Prompt", "dynamic": false, "info": "System prompt for the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1025,7 +1109,10 @@ "display_name": "Tools", "dynamic": false, "info": "", - "input_types": ["Tool", "BaseTool"], + "input_types": [ + "Tool", + "BaseTool" + ], "list": true, "load_from_db": false, "name": "tools", @@ -1043,7 +1130,9 @@ "display_name": "Prompt", "dynamic": false, "info": "This prompt must contain 'input' key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1096,7 +1185,12 @@ "data": { "id": "CalculatorTool-Nb4P5", "node": { - "base_classes": ["Data", "list", "Sequence", "Tool"], + "base_classes": [ + "Data", + "list", + "Sequence", + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1104,7 +1198,9 @@ "display_name": "Calculator", "documentation": "", "edited": false, - "field_order": ["expression"], + "field_order": [ + "expression" + ], "frozen": false, "icon": "calculator", "lf_version": "1.0.16", @@ -1117,9 +1213,13 @@ "display_name": "Data", "method": "run_model", "name": "api_run_model", - "required_inputs": ["expression"], + "required_inputs": [ + "expression" + ], "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -1127,9 +1227,13 @@ "display_name": "Tool", "method": "build_tool", "name": "api_build_tool", - "required_inputs": ["expression"], + "required_inputs": [ + "expression" + ], "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": "__UNDEFINED__" } ], @@ -1160,7 +1264,9 @@ "display_name": "Expression", "dynamic": false, "info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').", - "input_types": 
["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "expression", @@ -1198,7 +1304,10 @@ "display_name": "Python REPL Tool", "id": "PythonREPLTool-i922a", "node": { - "base_classes": ["Data", "Tool"], + "base_classes": [ + "Data", + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1206,7 +1315,12 @@ "display_name": "Python REPL Tool", "documentation": "", "edited": false, - "field_order": ["name", "description", "global_imports", "code"], + "field_order": [ + "name", + "description", + "global_imports", + "code" + ], "frozen": false, "metadata": {}, "output_types": [], @@ -1223,7 +1337,9 @@ "name" ], "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -1238,7 +1354,9 @@ "name" ], "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": "__UNDEFINED__" } ], diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json index 08ec53274..72a12e7f4 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json @@ -8,12 +8,17 @@ "dataType": "ChatInput", "id": "ChatInput-AwB1F", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "user_input", "id": "Prompt-bHLxK", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -30,12 +35,16 @@ "dataType": "Prompt", "id": "Prompt-bHLxK", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-tnzXU", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -52,12 +61,16 @@ 
"dataType": "OpenAIModel", "id": "OpenAIModel-tnzXU", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-wbcyd", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -75,7 +88,9 @@ "display_name": "Chat Input", "id": "ChatInput-AwB1F", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -102,7 +117,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -125,7 +142,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n 
value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, 
MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -173,7 +190,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -193,7 +212,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -207,7 +229,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -225,7 +249,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the 
chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -279,17 +305,23 @@ "display_name": "Prompt", "id": "Prompt-bHLxK", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["user_input"] + "template": [ + "user_input" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "metadata": {}, @@ -301,7 +333,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -350,7 +384,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -388,7 +425,9 @@ "display_name": "Chat Output", "id": "ChatOutput-wbcyd", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -415,7 +454,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -438,14 +479,16 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = 
\"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -463,7 +506,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -482,7 +527,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -496,7 +544,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -514,7 +564,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -568,7 +620,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-tnzXU", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -600,9 +655,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -622,7 +683,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -635,7 +698,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -669,7 +734,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -772,7 +839,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json index ab6184f46..1110a5b56 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json +++ 
b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json @@ -8,12 +8,16 @@ "dataType": "URL", "id": "URL-46k0m", "name": "data", - "output_types": ["Data"] + "output_types": [ + "Data" + ] }, "targetHandle": { "fieldName": "data", "id": "ParseData-jUQRS", - "inputTypes": ["Data"], + "inputTypes": [ + "Data" + ], "type": "other" } }, @@ -30,12 +34,17 @@ "dataType": "ParseData", "id": "ParseData-jUQRS", "name": "text", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "references", "id": "Prompt-Pf4QQ", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -52,12 +61,17 @@ "dataType": "TextInput", "id": "TextInput-slCbp", "name": "text", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "instructions", "id": "Prompt-Pf4QQ", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -74,12 +88,16 @@ "dataType": "Prompt", "id": "Prompt-Pf4QQ", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-o0Gr0", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -96,12 +114,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-o0Gr0", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-eIVde", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -119,7 +141,9 @@ "display_name": "URL", "id": "URL-46k0m", "node": { - "base_classes": ["Data"], + "base_classes": [ + "Data" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -127,7 +151,9 @@ "display_name": "URL", "documentation": "", "edited": false, - "field_order": ["urls"], + "field_order": [ + "urls" + ], "frozen": false, "icon": 
"layout-template", "metadata": {}, @@ -139,7 +165,9 @@ "method": "fetch_content", "name": "data", "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -148,7 +176,9 @@ "method": "fetch_content_text", "name": "text", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -181,7 +211,10 @@ "dynamic": false, "info": "Output format. Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.", "name": "format", - "options": ["Text", "Raw HTML"], + "options": [ + "Text", + "Raw HTML" + ], "placeholder": "", "required": false, "show": true, @@ -195,7 +228,9 @@ "display_name": "URLs", "dynamic": false, "info": "Enter one or more URLs, by clicking the '+' button.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": true, "load_from_db": false, "name": "urls", @@ -206,7 +241,10 @@ "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": ["langflow.org/", "docs.langflow.org/"] + "value": [ + "langflow.org/", + "docs.langflow.org/" + ] } } }, @@ -233,7 +271,9 @@ "display_name": "Parse Data", "id": "ParseData-jUQRS", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -241,7 +281,11 @@ "display_name": "Parse Data", "documentation": "", "edited": false, - "field_order": ["data", "template", "sep"], + "field_order": [ + "data", + "template", + "sep" + ], "frozen": false, "icon": "braces", "metadata": {}, @@ -253,7 +297,9 @@ "method": "parse_data", "name": "text", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -283,7 +329,9 @@ "display_name": "Data", "dynamic": false, "info": "The data to convert to text.", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": false, "name": "data", "placeholder": "", @@ -316,7 +364,9 @@ "display_name": "Template", 
"dynamic": false, "info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -355,17 +405,24 @@ "display_name": "Prompt", "id": "Prompt-Pf4QQ", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["references", "instructions"] + "template": [ + "references", + "instructions" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "metadata": {}, @@ -377,7 +434,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -410,7 +469,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -431,7 +493,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -485,7 +550,9 @@ "display_name": "Instructions", "id": "TextInput-slCbp", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -493,7 +560,9 @@ "display_name": "Instructions", "documentation": "", "edited": false, - "field_order": ["input_value"], + "field_order": [ + "input_value" + ], "frozen": false, "icon": "type", "output_types": [], @@ -504,7 +573,9 @@ "method": "text_response", "name": "text", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": 
"__UNDEFINED__" } ], @@ -535,7 +606,9 @@ "display_name": "Text", "dynamic": false, "info": "Text to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -574,7 +647,9 @@ "display_name": "Chat Output", "id": "ChatOutput-eIVde", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -601,7 +676,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -624,14 +701,16 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of 
the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n 
value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -649,7 +728,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -668,7 +749,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -682,7 +766,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -700,7 +786,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -754,7 +842,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-o0Gr0", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -786,9 +877,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -808,7 +905,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -821,7 +920,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The 
OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -855,7 +956,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -958,7 +1061,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json index f67aa36c8..33cad0af8 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json @@ -8,12 +8,16 @@ "dataType": "HierarchicalCrewComponent", "id": "HierarchicalCrewComponent-3JGY1", "name": "output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-8FKry", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -31,12 +35,16 @@ "dataType": "HierarchicalTaskComponent", "id": "HierarchicalTaskComponent-GuXH7", "name": "task_output", - "output_types": ["HierarchicalTask"] + "output_types": [ + "HierarchicalTask" + ] }, "targetHandle": { "fieldName": "tasks", "id": "HierarchicalCrewComponent-3JGY1", - "inputTypes": ["HierarchicalTask"], + "inputTypes": [ + "HierarchicalTask" + ], "type": "other" } }, @@ -54,12 +62,16 @@ "dataType": "CrewAIAgentComponent", "id": "CrewAIAgentComponent-ViX9d", "name": "output", - "output_types": ["Agent"] + "output_types": [ + "Agent" + ] }, "targetHandle": { "fieldName": "agents", "id": 
"HierarchicalCrewComponent-3JGY1", - "inputTypes": ["Agent"], + "inputTypes": [ + "Agent" + ], "type": "other" } }, @@ -77,12 +89,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-ucjrK", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "CrewAIAgentComponent-ViX9d", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -100,12 +116,16 @@ "dataType": "CrewAIAgentComponent", "id": "CrewAIAgentComponent-aCLi7", "name": "output", - "output_types": ["Agent"] + "output_types": [ + "Agent" + ] }, "targetHandle": { "fieldName": "manager_agent", "id": "HierarchicalCrewComponent-3JGY1", - "inputTypes": ["Agent"], + "inputTypes": [ + "Agent" + ], "type": "other" } }, @@ -123,12 +143,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-wMVJn", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "CrewAIAgentComponent-aCLi7", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -146,12 +170,16 @@ "dataType": "Prompt", "id": "Prompt-gbnQU", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "task_description", "id": "HierarchicalTaskComponent-GuXH7", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -169,12 +197,17 @@ "dataType": "ChatInput", "id": "ChatInput-1Gki8", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "query", "id": "Prompt-gbnQU", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -192,12 +225,16 @@ "dataType": "YFinanceTool", "id": "YFinanceTool-gzeyq", "name": "tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { 
"fieldName": "tools", "id": "CrewAIAgentComponent-ViX9d", - "inputTypes": ["Tool"], + "inputTypes": [ + "Tool" + ], "type": "other" } }, @@ -215,12 +252,16 @@ "dataType": "Prompt", "id": "Prompt-qCC5T", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-KnjCY", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -238,12 +279,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-KnjCY", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "role", "id": "CrewAIAgentComponent-ViX9d", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -261,12 +306,17 @@ "dataType": "ChatInput", "id": "ChatInput-1Gki8", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "query", "id": "Prompt-5wrVb", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -284,12 +334,17 @@ "dataType": "ChatInput", "id": "ChatInput-1Gki8", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "query", "id": "Prompt-qCC5T", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -307,12 +362,16 @@ "dataType": "Prompt", "id": "Prompt-5wrVb", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-wOSDH", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -330,12 +389,17 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-KnjCY", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "role", "id": "Prompt-5wrVb", - "inputTypes": ["Message", "Text"], + 
"inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -353,12 +417,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-wOSDH", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "goal", "id": "CrewAIAgentComponent-ViX9d", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -376,12 +444,16 @@ "dataType": "Prompt", "id": "Prompt-g5dyc", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-lajm6", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -399,12 +471,17 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-wOSDH", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "goal", "id": "Prompt-g5dyc", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -421,12 +498,17 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-KnjCY", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "role", "id": "Prompt-g5dyc", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -443,12 +525,17 @@ "dataType": "ChatInput", "id": "ChatInput-1Gki8", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "query", "id": "Prompt-g5dyc", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -465,12 +552,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-lajm6", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "backstory", "id": "CrewAIAgentComponent-ViX9d", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": 
"str" } }, @@ -487,12 +578,16 @@ "dataType": "SearchAPI", "id": "SearchAPI-1mPpu", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "CrewAIAgentComponent-ViX9d", - "inputTypes": ["Tool"], + "inputTypes": [ + "Tool" + ], "type": "other" } }, @@ -510,7 +605,9 @@ "display_name": "Hierarchical Crew", "id": "HierarchicalCrewComponent-3JGY1", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -542,7 +639,9 @@ "name": "output", "required_inputs": [], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -554,7 +653,9 @@ "display_name": "Agents", "dynamic": false, "info": "", - "input_types": ["Agent"], + "input_types": [ + "Agent" + ], "list": true, "name": "agents", "placeholder": "", @@ -588,7 +689,9 @@ "display_name": "Function Calling LLM", "dynamic": false, "info": "Turns the ReAct CrewAI agent into a function-calling agent", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "function_calling_llm", "placeholder": "", @@ -604,7 +707,9 @@ "display_name": "Manager Agent", "dynamic": false, "info": "", - "input_types": ["Agent"], + "input_types": [ + "Agent" + ], "list": false, "name": "manager_agent", "placeholder": "", @@ -620,7 +725,9 @@ "display_name": "Manager LLM", "dynamic": false, "info": "", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "manager_llm", "placeholder": "", @@ -681,7 +788,9 @@ "display_name": "Tasks", "dynamic": false, "info": "", - "input_types": ["HierarchicalTask"], + "input_types": [ + "HierarchicalTask" + ], "list": true, "name": "tasks", "placeholder": "", @@ -740,7 +849,10 @@ "data": { "id": "OpenAIModel-ucjrK", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + 
"Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -772,9 +884,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -794,7 +912,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -807,7 +927,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -841,7 +963,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -944,7 +1068,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -1056,7 +1182,9 @@ "display_name": "Chat Output", "id": "ChatOutput-8FKry", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1083,7 +1211,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1106,14 +1236,16 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom 
langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -1131,7 +1263,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -1150,7 +1284,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -1164,7 +1301,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -1182,7 +1321,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -1230,7 +1371,9 @@ "display_name": "Hierarchical Task", "id": "HierarchicalTaskComponent-GuXH7", "node": { - "base_classes": ["HierarchicalTask"], + "base_classes": [ + "HierarchicalTask" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1238,7 +1381,11 @@ "display_name": "Hierarchical Task", "documentation": "", "edited": false, - "field_order": ["task_description", "expected_output", "tools"], + "field_order": [ + "task_description", + "expected_output", + "tools" + ], "frozen": false, "icon": "CrewAI", "metadata": {}, @@ -1250,7 +1397,9 @@ "method": "build_task", "name": "task_output", "selected": "HierarchicalTask", - "types": ["HierarchicalTask"], + "types": [ + "HierarchicalTask" + ], "value": "__UNDEFINED__" } ], @@ -1280,7 +1429,9 @@ "display_name": "Expected Output", "dynamic": false, "info": "Clear definition of expected task outcome.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1299,7 +1450,9 @@ "display_name": "Description", "dynamic": false, "info": "Descriptive text detailing task's purpose and execution.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1318,7 +1471,9 @@ "display_name": "Tools", "dynamic": false, "info": "List of tools/resources limited for task execution. 
Uses the Agent tools by default.", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -1349,7 +1504,9 @@ "display_name": "CrewAI Agent", "id": "CrewAIAgentComponent-ViX9d", "node": { - "base_classes": ["Agent"], + "base_classes": [ + "Agent" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1380,7 +1537,9 @@ "method": "build_output", "name": "output", "selected": "Agent", - "types": ["Agent"], + "types": [ + "Agent" + ], "value": "__UNDEFINED__" } ], @@ -1422,7 +1581,9 @@ "display_name": "Backstory", "dynamic": false, "info": "The backstory of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1459,7 +1620,9 @@ "display_name": "Goal", "dynamic": false, "info": "The objective of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1493,7 +1656,9 @@ "display_name": "Language Model", "dynamic": false, "info": "Language model that will run the agent.", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1524,7 +1689,9 @@ "display_name": "Role", "dynamic": false, "info": "The role of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1543,7 +1710,9 @@ "display_name": "Tools", "dynamic": false, "info": "Tools at agents disposal", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -1590,7 +1759,9 @@ "display_name": "CrewAI Agent", "id": "CrewAIAgentComponent-aCLi7", "node": { - "base_classes": ["Agent"], + "base_classes": [ + "Agent" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1621,7 +1792,9 @@ "method": "build_output", "name": "output", "selected": "Agent", - 
"types": ["Agent"], + "types": [ + "Agent" + ], "value": "__UNDEFINED__" } ], @@ -1663,7 +1836,9 @@ "display_name": "Backstory", "dynamic": false, "info": "The backstory of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1700,7 +1875,9 @@ "display_name": "Goal", "dynamic": false, "info": "The objective of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1734,7 +1911,9 @@ "display_name": "Language Model", "dynamic": false, "info": "Language model that will run the agent.", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1765,7 +1944,9 @@ "display_name": "Role", "dynamic": false, "info": "The role of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1784,7 +1965,9 @@ "display_name": "Tools", "dynamic": false, "info": "Tools at agents disposal", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -1833,7 +2016,10 @@ "data": { "id": "OpenAIModel-wMVJn", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1865,9 +2051,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -1887,7 +2079,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -1900,7 +2094,9 @@ 
"display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -1934,7 +2130,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -2037,7 +2235,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -2149,18 +2349,24 @@ "display_name": "Prompt", "id": "Prompt-gbnQU", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["query"] + "template": [ + "query" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, "error": null, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "full_path": null, "icon": "prompts", @@ -2177,7 +2383,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -2210,7 +2418,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2257,7 +2468,9 @@ "data": { "id": "ChatInput-1Gki8", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2284,7 +2497,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } 
], @@ -2307,7 +2522,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n 
display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -2355,7 +2570,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2375,7 +2592,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -2389,7 +2609,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -2407,7 +2629,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -2460,7 +2684,9 @@ "display_name": "Yahoo Finance News Tool", "id": "YFinanceTool-gzeyq", "node": { - "base_classes": ["Tool"], + "base_classes": [ + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2479,7 +2705,9 @@ "method": "run_model", "name": "api_run_model", "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -2488,7 +2716,9 @@ "method": "build_tool", "name": "tool", "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": "__UNDEFINED__" } ], @@ -2519,7 +2749,9 @@ "display_name": "Query", "dynamic": false, "info": "Input should be a company ticker. For example, AAPL for Apple, MSFT for Microsoft.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -2555,7 +2787,10 @@ "data": { "id": "OpenAIModel-KnjCY", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2587,9 +2822,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -2609,7 +2850,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -2622,7 +2865,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": 
"api_key", "password": true, @@ -2656,7 +2901,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -2759,7 +3006,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -2871,17 +3120,23 @@ "display_name": "Role Prompt", "id": "Prompt-qCC5T", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["query"] + "template": [ + "query" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Role Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "output_types": [], @@ -2892,7 +3147,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -2925,7 +3182,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2977,7 +3237,10 @@ "data": { "id": "OpenAIModel-wOSDH", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -3009,9 +3272,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { 
@@ -3031,7 +3300,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -3044,7 +3315,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -3078,7 +3351,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -3181,7 +3456,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -3293,17 +3570,24 @@ "display_name": "Goal Prompt", "id": "Prompt-5wrVb", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["query", "role"] + "template": [ + "query", + "role" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Goal Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "output_types": [], @@ -3314,7 +3598,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -3347,7 +3633,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -3367,7 +3656,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, 
"load_from_db": false, "multiline": true, @@ -3419,7 +3711,10 @@ "data": { "id": "OpenAIModel-lajm6", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -3451,9 +3746,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -3473,7 +3774,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -3486,7 +3789,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -3520,7 +3825,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -3623,7 +3930,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -3735,18 +4044,26 @@ "display_name": "Prompt", "id": "Prompt-g5dyc", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["query", "role", "goal"] + "template": [ + "query", + "role", + "goal" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, "error": null, - "field_order": ["template"], + 
"field_order": [ + "template" + ], "frozen": false, "full_path": null, "icon": "prompts", @@ -3763,7 +4080,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -3796,7 +4115,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -3817,7 +4139,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -3838,7 +4163,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -3891,7 +4219,10 @@ "display_name": "Search API", "id": "SearchAPI-1mPpu", "node": { - "base_classes": ["Data", "Tool"], + "base_classes": [ + "Data", + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -3923,7 +4254,9 @@ "search_params" ], "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -3940,7 +4273,9 @@ "search_params" ], "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": "__UNDEFINED__" } ], @@ -3952,7 +4287,9 @@ "display_name": "SearchAPI API Key", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -3986,7 +4323,9 @@ "display_name": "Engine", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "engine", @@ -4004,7 +4343,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, 
"multiline": true, @@ -4103,4 +4444,5 @@ "openai", "chatbots" ] + } diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json index ce94800c1..afa9a100d 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json @@ -8,12 +8,17 @@ "dataType": "ChatInput", "id": "ChatInput-Emi4q", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "Question", "id": "Prompt-n8yRL", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -30,12 +35,16 @@ "dataType": "Prompt", "id": "Prompt-n8yRL", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-1hwZ2", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -52,12 +61,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-1hwZ2", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-sD0lp", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -74,12 +87,17 @@ "dataType": "ParseData", "id": "ParseData-qYLes", "name": "text", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "Document", "id": "Prompt-n8yRL", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -96,12 +114,16 @@ "dataType": "File", "id": "File-0oa6O", "name": "data", - "output_types": ["Data"] + "output_types": [ + "Data" + ] }, "targetHandle": { "fieldName": "data", "id": "ParseData-qYLes", - "inputTypes": ["Data"], + "inputTypes": [ + "Data" + ], "type": "other" } }, @@ -119,17 +141,24 
@@ "display_name": "Prompt", "id": "Prompt-n8yRL", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["Document", "Question"] + "template": [ + "Document", + "Question" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "metadata": {}, @@ -141,7 +170,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -155,7 +186,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -176,7 +210,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -249,7 +286,9 @@ "display_name": "Chat Input", "id": "ChatInput-Emi4q", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -276,7 +315,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -299,7 +340,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, 
MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs 
import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -347,7 +388,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -367,7 +410,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -381,7 +427,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -399,7 +447,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -453,7 +503,9 @@ "display_name": "Chat Output", "id": "ChatOutput-sD0lp", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -480,7 +532,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -503,14 +557,16 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n 
advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -528,7 +584,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -547,7 +605,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -561,7 +622,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -579,7 +642,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -633,7 +698,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-1hwZ2", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -665,9 +733,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -687,7 +761,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -700,7 +776,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The 
OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -734,7 +812,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -837,7 +917,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -949,7 +1031,9 @@ "display_name": "Parse Data", "id": "ParseData-qYLes", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -957,7 +1041,11 @@ "display_name": "Parse Data", "documentation": "", "edited": false, - "field_order": ["data", "template", "sep"], + "field_order": [ + "data", + "template", + "sep" + ], "frozen": false, "icon": "braces", "metadata": {}, @@ -969,7 +1057,9 @@ "method": "parse_data", "name": "text", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -999,7 +1089,9 @@ "display_name": "Data", "dynamic": false, "info": "The data to convert to text.", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": false, "name": "data", "placeholder": "", @@ -1032,7 +1124,9 @@ "display_name": "Template", "dynamic": false, "info": "The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1071,7 +1165,9 @@ "display_name": "File", "id": "File-0oa6O", "node": { - "base_classes": ["Data"], + "base_classes": [ + "Data" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1079,7 +1175,10 @@ "display_name": "File", "documentation": "", "edited": false, - "field_order": ["path", "silent_errors"], + "field_order": [ + "path", + "silent_errors" + ], "frozen": false, "icon": "file-text", "metadata": {}, @@ -1091,7 +1190,9 @@ "method": "load_file", "name": "data", "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" } ], diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json index 80fb793f4..ce8c52f7a 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json @@ -8,12 +8,16 @@ "dataType": "HierarchicalCrewComponent", "id": "HierarchicalCrewComponent-Y0Uvf", "name": "output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-VzVJK", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -31,12 +35,16 @@ "dataType": "HierarchicalTaskComponent", "id": "HierarchicalTaskComponent-hE8H5", "name": "task_output", - "output_types": ["HierarchicalTask"] + "output_types": [ + "HierarchicalTask" + ] }, "targetHandle": { "fieldName": "tasks", "id": "HierarchicalCrewComponent-Y0Uvf", - "inputTypes": ["HierarchicalTask"], + "inputTypes": [ + "HierarchicalTask" + ], "type": "other" } }, @@ -54,12 +62,16 @@ "dataType": "CrewAIAgentComponent", "id": 
"CrewAIAgentComponent-EbpXd", "name": "output", - "output_types": ["Agent"] + "output_types": [ + "Agent" + ] }, "targetHandle": { "fieldName": "agents", "id": "HierarchicalCrewComponent-Y0Uvf", - "inputTypes": ["Agent"], + "inputTypes": [ + "Agent" + ], "type": "other" } }, @@ -77,12 +89,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-Yjtpu", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "CrewAIAgentComponent-EbpXd", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -100,12 +116,16 @@ "dataType": "CrewAIAgentComponent", "id": "CrewAIAgentComponent-9D8ao", "name": "output", - "output_types": ["Agent"] + "output_types": [ + "Agent" + ] }, "targetHandle": { "fieldName": "manager_agent", "id": "HierarchicalCrewComponent-Y0Uvf", - "inputTypes": ["Agent"], + "inputTypes": [ + "Agent" + ], "type": "other" } }, @@ -123,12 +143,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-HgNnu", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "CrewAIAgentComponent-9D8ao", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -146,12 +170,16 @@ "dataType": "Prompt", "id": "Prompt-eqGhn", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "task_description", "id": "HierarchicalTaskComponent-hE8H5", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -168,12 +196,17 @@ "dataType": "ChatInput", "id": "ChatInput-xgRl9", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "query", "id": "Prompt-eqGhn", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -190,12 +223,16 @@ 
"dataType": "CrewAIAgentComponent", "id": "CrewAIAgentComponent-UMpxO", "name": "output", - "output_types": ["Agent"] + "output_types": [ + "Agent" + ] }, "targetHandle": { "fieldName": "agents", "id": "HierarchicalCrewComponent-Y0Uvf", - "inputTypes": ["Agent"], + "inputTypes": [ + "Agent" + ], "type": "other" } }, @@ -212,12 +249,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-Yjtpu", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "CrewAIAgentComponent-UMpxO", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -234,12 +275,16 @@ "dataType": "SearchAPI", "id": "SearchAPI-Yokat", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "CrewAIAgentComponent-EbpXd", - "inputTypes": ["Tool"], + "inputTypes": [ + "Tool" + ], "type": "other" } }, @@ -257,7 +302,9 @@ "display_name": "Hierarchical Crew", "id": "HierarchicalCrewComponent-Y0Uvf", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -289,7 +336,9 @@ "name": "output", "required_inputs": [], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -301,7 +350,9 @@ "display_name": "Agents", "dynamic": false, "info": "", - "input_types": ["Agent"], + "input_types": [ + "Agent" + ], "list": true, "name": "agents", "placeholder": "", @@ -335,7 +386,9 @@ "display_name": "Function Calling LLM", "dynamic": false, "info": "Turns the ReAct CrewAI agent into a function-calling agent", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "function_calling_llm", "placeholder": "", @@ -351,7 +404,9 @@ "display_name": "Manager Agent", "dynamic": false, "info": "", - "input_types": ["Agent"], + "input_types": 
[ + "Agent" + ], "list": false, "name": "manager_agent", "placeholder": "", @@ -367,7 +422,9 @@ "display_name": "Manager LLM", "dynamic": false, "info": "", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "manager_llm", "placeholder": "", @@ -428,7 +485,9 @@ "display_name": "Tasks", "dynamic": false, "info": "", - "input_types": ["HierarchicalTask"], + "input_types": [ + "HierarchicalTask" + ], "list": true, "name": "tasks", "placeholder": "", @@ -490,7 +549,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-Yjtpu", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -522,9 +584,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -544,7 +612,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -556,7 +626,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -590,7 +662,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -694,7 +768,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -755,7 +831,9 
@@ "display_name": "System Message", "dynamic": false, "info": "System message to pass to the model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "system_message", @@ -808,7 +886,9 @@ "display_name": "Chat Output", "id": "ChatOutput-VzVJK", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -835,7 +915,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -858,14 +940,16 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n 
advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -883,7 +967,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -902,7 +988,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -916,7 +1005,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -934,7 +1025,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -987,7 +1080,9 @@ "display_name": "Hierarchical Task", "id": "HierarchicalTaskComponent-hE8H5", "node": { - "base_classes": ["HierarchicalTask"], + "base_classes": [ + "HierarchicalTask" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -995,7 +1090,11 @@ "display_name": "Hierarchical Task", "documentation": "", "edited": false, - "field_order": ["task_description", "expected_output", "tools"], + "field_order": [ + "task_description", + "expected_output", + "tools" + ], "frozen": false, "icon": "CrewAI", "metadata": {}, @@ -1007,7 +1106,9 @@ "method": "build_task", "name": "task_output", "selected": "HierarchicalTask", - "types": ["HierarchicalTask"], + "types": [ + "HierarchicalTask" + ], "value": "__UNDEFINED__" } ], @@ -1037,7 +1138,9 @@ "display_name": "Expected Output", "dynamic": false, "info": 
"Clear definition of expected task outcome.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1056,7 +1159,9 @@ "display_name": "Description", "dynamic": false, "info": "Descriptive text detailing task's purpose and execution.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1075,7 +1180,9 @@ "display_name": "Tools", "dynamic": false, "info": "List of tools/resources limited for task execution. Uses the Agent tools by default.", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -1111,7 +1218,9 @@ "display_name": "CrewAI Agent", "id": "CrewAIAgentComponent-EbpXd", "node": { - "base_classes": ["Agent"], + "base_classes": [ + "Agent" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1142,7 +1251,9 @@ "method": "build_output", "name": "output", "selected": "Agent", - "types": ["Agent"], + "types": [ + "Agent" + ], "value": "__UNDEFINED__" } ], @@ -1184,7 +1295,9 @@ "display_name": "Backstory", "dynamic": false, "info": "The backstory of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1221,7 +1334,9 @@ "display_name": "Goal", "dynamic": false, "info": "The objective of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1255,7 +1370,9 @@ "display_name": "Language Model", "dynamic": false, "info": "Language model that will run the agent.", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1286,7 +1403,9 @@ "display_name": "Role", "dynamic": false, "info": "The role of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": 
false, "multiline": true, @@ -1305,7 +1424,9 @@ "display_name": "Tools", "dynamic": false, "info": "Tools at agents disposal", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -1356,7 +1477,9 @@ "display_name": "CrewAI Agent", "id": "CrewAIAgentComponent-9D8ao", "node": { - "base_classes": ["Agent"], + "base_classes": [ + "Agent" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1387,7 +1510,9 @@ "method": "build_output", "name": "output", "selected": "Agent", - "types": ["Agent"], + "types": [ + "Agent" + ], "value": "__UNDEFINED__" } ], @@ -1429,7 +1554,9 @@ "display_name": "Backstory", "dynamic": false, "info": "The backstory of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1466,7 +1593,9 @@ "display_name": "Goal", "dynamic": false, "info": "The objective of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1500,7 +1629,9 @@ "display_name": "Language Model", "dynamic": false, "info": "Language model that will run the agent.", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1531,7 +1662,9 @@ "display_name": "Role", "dynamic": false, "info": "The role of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1550,7 +1683,9 @@ "display_name": "Tools", "dynamic": false, "info": "Tools at agents disposal", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -1601,7 +1736,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-HgNnu", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], 
"custom_fields": {}, @@ -1633,9 +1771,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -1655,7 +1799,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -1667,7 +1813,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -1701,7 +1849,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -1805,7 +1955,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -1866,7 +2018,9 @@ "display_name": "System Message", "dynamic": false, "info": "System message to pass to the model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "system_message", @@ -1919,18 +2073,24 @@ "display_name": "Prompt", "id": "Prompt-eqGhn", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["query"] + "template": [ + "query" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, "error": null, - "field_order": ["template"], + "field_order": [ + "template" + ], 
"frozen": false, "full_path": null, "icon": "prompts", @@ -1947,7 +2107,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1980,7 +2142,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2031,7 +2196,9 @@ "data": { "id": "ChatInput-xgRl9", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2058,7 +2225,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -2081,7 +2250,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n 
advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n 
value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -2129,7 +2298,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2149,7 +2320,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -2163,7 +2337,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, 
"load_from_db": false, "name": "sender_name", @@ -2181,7 +2357,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -2234,7 +2412,9 @@ "display_name": "CrewAI Agent", "id": "CrewAIAgentComponent-UMpxO", "node": { - "base_classes": ["Agent"], + "base_classes": [ + "Agent" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2265,7 +2445,9 @@ "method": "build_output", "name": "output", "selected": "Agent", - "types": ["Agent"], + "types": [ + "Agent" + ], "value": "__UNDEFINED__" } ], @@ -2307,7 +2489,9 @@ "display_name": "Backstory", "dynamic": false, "info": "The backstory of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2344,7 +2528,9 @@ "display_name": "Goal", "dynamic": false, "info": "The objective of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2378,7 +2564,9 @@ "display_name": "Language Model", "dynamic": false, "info": "Language model that will run the agent.", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -2409,7 +2597,9 @@ "display_name": "Role", "dynamic": false, "info": "The role of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2428,7 +2618,9 @@ "display_name": "Tools", "dynamic": false, "info": "Tools at agents disposal", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -2477,7 +2669,10 @@ "data": { "id": "SearchAPI-Yokat", "node": { - "base_classes": ["Data", "Tool"], + "base_classes": [ + 
"Data", + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2509,7 +2704,9 @@ "search_params" ], "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -2526,7 +2723,9 @@ "search_params" ], "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": "__UNDEFINED__" } ], @@ -2538,7 +2737,9 @@ "display_name": "SearchAPI API Key", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -2572,7 +2773,9 @@ "display_name": "Engine", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "engine", @@ -2590,7 +2793,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2689,4 +2894,5 @@ "openai", "chatbots" ] + } diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json index a37705424..467f64fc4 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json @@ -8,12 +8,17 @@ "dataType": "ChatInput", "id": "ChatInput-6yuNd", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "user_message", "id": "Prompt-tifRl", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -30,12 +35,16 @@ "dataType": "Prompt", "id": "Prompt-tifRl", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-ZIeE0", - "inputTypes": ["Message"], + "inputTypes": [ + 
"Message" + ], "type": "str" } }, @@ -52,12 +61,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-ZIeE0", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-c3v9q", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -74,12 +87,17 @@ "dataType": "Memory", "id": "Memory-6s5g1", "name": "messages_text", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "context", "id": "Prompt-tifRl", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -97,17 +115,24 @@ "display_name": "Prompt", "id": "Prompt-tifRl", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["context", "user_message"] + "template": [ + "context", + "user_message" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "metadata": {}, @@ -119,7 +144,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -152,7 +179,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -189,7 +219,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -227,7 +260,9 @@ "display_name": "Chat Input", "id": "ChatInput-6yuNd", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, 
"conditional_paths": [], "custom_fields": {}, @@ -254,7 +289,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -277,7 +314,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n 
display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -325,7 +362,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -345,7 +384,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -359,7 +401,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -377,7 +421,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -431,7 +477,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-ZIeE0", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -463,9 +512,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -485,7 +540,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -498,7 +555,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -532,7 +591,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -635,7 +696,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -747,7 +810,9 @@ "display_name": "Chat Output", "id": "ChatOutput-c3v9q", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -774,7 +839,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + 
"types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -797,14 +864,16 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -822,7 +891,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -841,7 +912,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -855,7 +929,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -873,7 +949,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -922,7 +1000,11 @@ "display_name": "Chat Memory", "id": "Memory-6s5g1", "node": { - "base_classes": ["BaseChatMemory", "Data", "Message"], + "base_classes": [ + "BaseChatMemory", + "Data", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -950,7 +1032,9 @@ "method": "retrieve_messages", "name": "messages", "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -959,7 +1043,9 @@ "method": "retrieve_messages_as_text", "name": "messages_text", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -968,7 +1054,9 @@ "method": "build_lc_memory", "name": "lc_memory", "selected": "BaseChatMemory", - "types": ["BaseChatMemory"], + "types": [ + "BaseChatMemory" + ], "value": "__UNDEFINED__" } ], @@ -998,7 +1086,9 @@ "display_name": "External Memory", "dynamic": false, "info": "Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.", - "input_types": ["BaseChatMessageHistory"], + "input_types": [ + "BaseChatMessageHistory" + ], "list": false, "name": "memory", "placeholder": "", @@ -1030,7 +1120,10 @@ "dynamic": false, "info": "Order of the messages.", "name": "order", - "options": ["Ascending", "Descending"], + "options": [ + "Ascending", + "Descending" + ], "placeholder": "", "required": false, "show": true, @@ -1045,7 +1138,11 @@ "dynamic": false, "info": "Filter by sender type.", "name": "sender", - "options": ["Machine", "User", "Machine and User"], + "options": [ + "Machine", + "User", + "Machine and User" + ], "placeholder": "", "required": false, "show": true, @@ -1059,7 +1156,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Filter by sender name.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -1077,7 +1176,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -1095,7 +1196,9 @@ "display_name": "Template", "dynamic": false, "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json index b73003b23..ca6123e3a 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json @@ -8,12 +8,16 @@ "dataType": "SequentialCrewComponent", "id": "SequentialCrewComponent-3dbbB", "name": "output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-nwCjg", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -30,12 +34,17 @@ "dataType": "TextInput", "id": "TextInput-6QUGr", "name": "text", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "topic", "id": "Prompt-GOdlL", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -52,12 +61,17 @@ "dataType": "TextInput", "id": "TextInput-6QUGr", "name": "text", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "topic", "id": "Prompt-824D7", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -74,12 +88,17 @@ "dataType": "TextInput", "id": "TextInput-6QUGr", "name": "text", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "topic", "id": "Prompt-0vHob", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -96,12 +115,16 @@ "dataType": "Prompt", "id": "Prompt-GOdlL", "name": "prompt", - "output_types": ["Message"] + 
"output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "task_description", "id": "SequentialTaskAgentComponent-GWMA1", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -118,12 +141,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-lQ5HF", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "SequentialTaskAgentComponent-GWMA1", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -139,12 +166,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-lQ5HF", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "SequentialTaskAgentComponent-5i4Wg", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -160,12 +191,16 @@ "dataType": "Prompt", "id": "Prompt-824D7", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "task_description", "id": "SequentialTaskAgentComponent-5i4Wg", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -181,12 +216,16 @@ "dataType": "SequentialTaskAgentComponent", "id": "SequentialTaskAgentComponent-GWMA1", "name": "task_output", - "output_types": ["SequentialTask"] + "output_types": [ + "SequentialTask" + ] }, "targetHandle": { "fieldName": "previous_task", "id": "SequentialTaskAgentComponent-5i4Wg", - "inputTypes": ["SequentialTask"], + "inputTypes": [ + "SequentialTask" + ], "type": "other" } }, @@ -202,12 +241,16 @@ "dataType": "SequentialTaskAgentComponent", "id": "SequentialTaskAgentComponent-5i4Wg", "name": "task_output", - "output_types": ["SequentialTask"] + "output_types": [ + "SequentialTask" + ] }, "targetHandle": { "fieldName": "previous_task", "id": "SequentialTaskAgentComponent-TPEWE", - "inputTypes": 
["SequentialTask"], + "inputTypes": [ + "SequentialTask" + ], "type": "other" } }, @@ -223,12 +266,16 @@ "dataType": "Prompt", "id": "Prompt-0vHob", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "task_description", "id": "SequentialTaskAgentComponent-TPEWE", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -244,12 +291,16 @@ "dataType": "SequentialTaskAgentComponent", "id": "SequentialTaskAgentComponent-TPEWE", "name": "task_output", - "output_types": ["SequentialTask"] + "output_types": [ + "SequentialTask" + ] }, "targetHandle": { "fieldName": "tasks", "id": "SequentialCrewComponent-3dbbB", - "inputTypes": ["SequentialTask"], + "inputTypes": [ + "SequentialTask" + ], "type": "other" } }, @@ -265,12 +316,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-lQ5HF", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "SequentialTaskAgentComponent-TPEWE", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -286,12 +341,16 @@ "dataType": "YFinanceTool", "id": "YFinanceTool-Asoka", "name": "tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "SequentialTaskAgentComponent-GWMA1", - "inputTypes": ["Tool"], + "inputTypes": [ + "Tool" + ], "type": "other" } }, @@ -309,7 +368,9 @@ "display_name": "Sequential Crew", "id": "SequentialCrewComponent-3dbbB", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -339,7 +400,9 @@ "name": "output", "required_inputs": [], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -369,7 +432,9 @@ "display_name": "Function Calling LLM", "dynamic": false, "info": "Turns the ReAct CrewAI 
agent into a function-calling agent", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "function_calling_llm", "placeholder": "", @@ -430,7 +495,9 @@ "display_name": "Tasks", "dynamic": false, "info": "", - "input_types": ["SequentialTask"], + "input_types": [ + "SequentialTask" + ], "list": true, "name": "tasks", "placeholder": "", @@ -494,7 +561,10 @@ "data": { "id": "OpenAIModel-lQ5HF", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -527,9 +597,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -549,7 +625,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -562,7 +640,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -596,7 +676,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -699,7 +781,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -811,7 +895,9 @@ "display_name": "Chat Output", "id": "ChatOutput-nwCjg", "node": { - "base_classes": ["Message"], + "base_classes": [ + 
"Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -839,7 +925,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -862,14 +950,16 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -887,7 +977,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -906,7 +998,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -920,7 +1015,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -938,7 +1035,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -985,7 +1084,9 @@ "data": { "id": "TextInput-6QUGr", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -993,7 +1094,9 @@ "display_name": "Topic", "documentation": "", "edited": false, - "field_order": ["input_value"], + "field_order": [ + "input_value" + ], "frozen": false, "icon": "type", "lf_version": "1.0.15", @@ -1005,7 +1108,9 @@ "method": "text_response", "name": "text", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1035,7 +1140,9 @@ "display_name": "Text", "dynamic": false, "info": "Text to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -1073,17 +1180,23 @@ "display_name": "Prompt", "id": "Prompt-GOdlL", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["topic"] + "template": [ + "topic" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "lf_version": "1.0.15", @@ -1096,7 +1209,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1145,7 +1260,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1183,17 +1301,23 @@ "display_name": "Prompt", "id": "Prompt-824D7", "node": { - 
"base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["topic"] + "template": [ + "topic" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "lf_version": "1.0.15", @@ -1206,7 +1330,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1255,7 +1381,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1293,17 +1422,23 @@ "display_name": "Prompt", "id": "Prompt-0vHob", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["topic"] + "template": [ + "topic" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "lf_version": "1.0.15", @@ -1316,7 +1451,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1365,7 +1502,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1401,7 +1541,9 @@ "data": { "id": "SequentialTaskAgentComponent-GWMA1", "node": { - "base_classes": ["SequentialTask"], + "base_classes": [ + "SequentialTask" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1437,7 +1579,9 @@ "method": 
"build_agent_and_task", "name": "task_output", "selected": "SequentialTask", - "types": ["SequentialTask"], + "types": [ + "SequentialTask" + ], "value": "__UNDEFINED__" } ], @@ -1514,7 +1658,9 @@ "display_name": "Backstory", "dynamic": false, "info": "The backstory of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1552,7 +1698,9 @@ "display_name": "Expected Task Output", "dynamic": false, "info": "Clear definition of expected task outcome.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1572,7 +1720,9 @@ "display_name": "Goal", "dynamic": false, "info": "The objective of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1592,7 +1742,9 @@ "display_name": "Language Model", "dynamic": false, "info": "Language model that will run the agent.", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1625,7 +1777,9 @@ "display_name": "Previous Task", "dynamic": false, "info": "The previous task in the sequence (for chaining).", - "input_types": ["SequentialTask"], + "input_types": [ + "SequentialTask" + ], "list": false, "name": "previous_task", "placeholder": "", @@ -1642,7 +1796,9 @@ "display_name": "Role", "dynamic": false, "info": "The role of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1662,7 +1818,9 @@ "display_name": "Task Description", "dynamic": false, "info": "Descriptive text detailing task's purpose and execution.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1682,7 +1840,9 @@ "display_name": "Tools", "dynamic": false, "info": "Tools at agent's 
disposal", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -1732,7 +1892,9 @@ "data": { "id": "SequentialTaskAgentComponent-5i4Wg", "node": { - "base_classes": ["SequentialTask"], + "base_classes": [ + "SequentialTask" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1768,7 +1930,9 @@ "method": "build_agent_and_task", "name": "task_output", "selected": "SequentialTask", - "types": ["SequentialTask"], + "types": [ + "SequentialTask" + ], "value": "__UNDEFINED__" } ], @@ -1845,7 +2009,9 @@ "display_name": "Backstory", "dynamic": false, "info": "The backstory of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1883,7 +2049,9 @@ "display_name": "Expected Task Output", "dynamic": false, "info": "Clear definition of expected task outcome.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1903,7 +2071,9 @@ "display_name": "Goal", "dynamic": false, "info": "The objective of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1923,7 +2093,9 @@ "display_name": "Language Model", "dynamic": false, "info": "Language model that will run the agent.", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1956,7 +2128,9 @@ "display_name": "Previous Task", "dynamic": false, "info": "The previous task in the sequence (for chaining).", - "input_types": ["SequentialTask"], + "input_types": [ + "SequentialTask" + ], "list": false, "name": "previous_task", "placeholder": "", @@ -1973,7 +2147,9 @@ "display_name": "Role", "dynamic": false, "info": "The role of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, 
"multiline": true, @@ -1993,7 +2169,9 @@ "display_name": "Task Description", "dynamic": false, "info": "Descriptive text detailing task's purpose and execution.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2013,7 +2191,9 @@ "display_name": "Tools", "dynamic": false, "info": "Tools at agent's disposal", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -2063,7 +2243,9 @@ "data": { "id": "SequentialTaskAgentComponent-TPEWE", "node": { - "base_classes": ["SequentialTask"], + "base_classes": [ + "SequentialTask" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2099,7 +2281,9 @@ "method": "build_agent_and_task", "name": "task_output", "selected": "SequentialTask", - "types": ["SequentialTask"], + "types": [ + "SequentialTask" + ], "value": "__UNDEFINED__" } ], @@ -2176,7 +2360,9 @@ "display_name": "Backstory", "dynamic": false, "info": "The backstory of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2214,7 +2400,9 @@ "display_name": "Expected Task Output", "dynamic": false, "info": "Clear definition of expected task outcome.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2234,7 +2422,9 @@ "display_name": "Goal", "dynamic": false, "info": "The objective of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2254,7 +2444,9 @@ "display_name": "Language Model", "dynamic": false, "info": "Language model that will run the agent.", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -2287,7 +2479,9 @@ "display_name": "Previous Task", "dynamic": false, "info": "The previous task 
in the sequence (for chaining).", - "input_types": ["SequentialTask"], + "input_types": [ + "SequentialTask" + ], "list": false, "name": "previous_task", "placeholder": "", @@ -2304,7 +2498,9 @@ "display_name": "Role", "dynamic": false, "info": "The role of the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2324,7 +2520,9 @@ "display_name": "Task Description", "dynamic": false, "info": "Descriptive text detailing task's purpose and execution.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2344,7 +2542,9 @@ "display_name": "Tools", "dynamic": false, "info": "Tools at agent's disposal", - "input_types": ["Tool"], + "input_types": [ + "Tool" + ], "list": true, "name": "tools", "placeholder": "", @@ -2394,7 +2594,9 @@ "data": { "id": "YFinanceTool-Asoka", "node": { - "base_classes": ["Tool"], + "base_classes": [ + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2414,7 +2616,9 @@ "method": "run_model", "name": "api_run_model", "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -2423,7 +2627,9 @@ "method": "build_tool", "name": "tool", "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": "__UNDEFINED__" } ], @@ -2454,7 +2660,9 @@ "display_name": "Query", "dynamic": false, "info": "Input should be a company ticker. 
For example, AAPL for Apple, MSFT for Microsoft.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json b/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json index da70e5fb3..fb06e377f 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json @@ -8,12 +8,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-gRakF", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "ToolCallingAgent-0QzrL", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -30,12 +34,16 @@ "dataType": "ChatInput", "id": "ChatInput-uYdzQ", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ToolCallingAgent-0QzrL", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -52,12 +60,16 @@ "dataType": "ToolCallingAgent", "id": "ToolCallingAgent-KLe5u", "name": "response", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ToolCallingAgent-VYDK9", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -74,12 +86,16 @@ "dataType": "ToolCallingAgent", "id": "ToolCallingAgent-0QzrL", "name": "response", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ToolCallingAgent-KLe5u", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -96,12 +112,17 @@ "dataType": "SearchAPI", "id": "SearchAPI-I4yU0", "name": 
"api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-0QzrL", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": "other" } }, @@ -118,12 +139,17 @@ "dataType": "url_content_fetcher", "id": "url_content_fetcher-1FugB", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-0QzrL", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": "other" } }, @@ -140,12 +166,17 @@ "dataType": "SearchAPI", "id": "SearchAPI-I4yU0", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-KLe5u", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": "other" } }, @@ -162,12 +193,17 @@ "dataType": "url_content_fetcher", "id": "url_content_fetcher-1FugB", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-KLe5u", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": "other" } }, @@ -184,12 +220,17 @@ "dataType": "url_content_fetcher", "id": "url_content_fetcher-1FugB", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-VYDK9", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": "other" } }, @@ -206,12 +247,17 @@ "dataType": "SearchAPI", "id": "SearchAPI-I4yU0", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-VYDK9", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": 
"other" } }, @@ -228,12 +274,16 @@ "dataType": "ToolCallingAgent", "id": "ToolCallingAgent-VYDK9", "name": "response", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-O63dG", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -250,12 +300,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-gRakF", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "ToolCallingAgent-VYDK9", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -272,12 +326,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-gRakF", "name": "model_output", - "output_types": ["LanguageModel"] + "output_types": [ + "LanguageModel" + ] }, "targetHandle": { "fieldName": "llm", "id": "ToolCallingAgent-KLe5u", - "inputTypes": ["LanguageModel"], + "inputTypes": [ + "LanguageModel" + ], "type": "other" } }, @@ -294,12 +352,17 @@ "dataType": "CalculatorTool", "id": "CalculatorTool-5S6u9", "name": "api_build_tool", - "output_types": ["Tool"] + "output_types": [ + "Tool" + ] }, "targetHandle": { "fieldName": "tools", "id": "ToolCallingAgent-VYDK9", - "inputTypes": ["Tool", "BaseTool"], + "inputTypes": [ + "Tool", + "BaseTool" + ], "type": "other" } }, @@ -315,7 +378,9 @@ "data": { "id": "ChatInput-uYdzQ", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -343,7 +408,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -366,7 +433,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import 
BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n 
display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "_input_type": "FileInput", @@ -416,7 +483,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -438,7 +507,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -453,7 +525,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -472,7 +546,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -524,7 +600,9 @@ "data": { "id": "ChatOutput-O63dG", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -552,7 +630,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -575,7 +655,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n 
advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "_input_type": "MessageTextInput", @@ -583,7 +663,9 @@ "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -602,7 +684,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -623,7 +707,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -638,7 +725,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -657,7 +746,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -711,7 +802,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-gRakF", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -743,9 +837,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -765,7 +865,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -778,7 +880,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The 
OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -813,7 +917,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -929,7 +1035,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -994,7 +1102,9 @@ "display_name": "System Message", "dynamic": false, "info": "System message to pass to the model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "system_message", @@ -1046,7 +1156,10 @@ "data": { "id": "ToolCallingAgent-0QzrL", "node": { - "base_classes": ["AgentExecutor", "Message"], + "base_classes": [ + "AgentExecutor", + "Message" + ], "beta": true, "conditional_paths": [], "custom_fields": {}, @@ -1076,7 +1189,9 @@ "method": "build_agent", "name": "agent", "selected": "AgentExecutor", - "types": ["AgentExecutor"], + "types": [ + "AgentExecutor" + ], "value": "__UNDEFINED__" }, { @@ -1085,7 +1200,9 @@ "method": "message_response", "name": "response", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1098,7 +1215,9 @@ "display_name": "Chat History", "dynamic": false, "info": "", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": true, "name": "chat_history", "placeholder": "", @@ -1150,7 +1269,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -1169,7 +1290,9 @@ "display_name": "Language Model", "dynamic": false, "info": "", - 
"input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1202,7 +1325,9 @@ "display_name": "System Prompt", "dynamic": false, "info": "System prompt for the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1222,7 +1347,10 @@ "display_name": "Tools", "dynamic": false, "info": "", - "input_types": ["Tool", "BaseTool"], + "input_types": [ + "Tool", + "BaseTool" + ], "list": true, "load_from_db": false, "name": "tools", @@ -1240,7 +1368,9 @@ "display_name": "Prompt", "dynamic": false, "info": "This prompt must contain 'input' key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1293,7 +1423,11 @@ "data": { "id": "SearchAPI-I4yU0", "node": { - "base_classes": ["Data", "list", "Tool"], + "base_classes": [ + "Data", + "list", + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1327,7 +1461,9 @@ "search_params" ], "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -1344,7 +1480,9 @@ "search_params" ], "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": "__UNDEFINED__" } ], @@ -1357,7 +1495,9 @@ "display_name": "SearchAPI API Key", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -1392,7 +1532,9 @@ "display_name": "Engine", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "engine", @@ -1411,7 +1553,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1496,7 +1640,11 @@ "data": { "id": 
"url_content_fetcher-1FugB", "node": { - "base_classes": ["Data", "list", "Tool"], + "base_classes": [ + "Data", + "list", + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1504,7 +1652,10 @@ "display_name": "URL Content Fetcher", "documentation": "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base", "edited": true, - "field_order": ["url", "fetch_params"], + "field_order": [ + "url", + "fetch_params" + ], "frozen": false, "icon": "globe", "lf_version": "1.0.15", @@ -1517,7 +1668,10 @@ "method": "run_model", "name": "api_run_model", "selected": "Data", - "types": ["Data", "list"], + "types": [ + "Data", + "list" + ], "value": "__UNDEFINED__" }, { @@ -1526,7 +1680,9 @@ "method": "build_tool", "name": "api_build_tool", "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": "__UNDEFINED__" } ], @@ -1573,7 +1729,9 @@ "display_name": "URL", "dynamic": false, "info": "Enter a single URL to fetch content from.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "url", @@ -1609,7 +1767,10 @@ "data": { "id": "ToolCallingAgent-KLe5u", "node": { - "base_classes": ["AgentExecutor", "Message"], + "base_classes": [ + "AgentExecutor", + "Message" + ], "beta": true, "conditional_paths": [], "custom_fields": {}, @@ -1639,7 +1800,9 @@ "method": "build_agent", "name": "agent", "selected": "AgentExecutor", - "types": ["AgentExecutor"], + "types": [ + "AgentExecutor" + ], "value": "__UNDEFINED__" }, { @@ -1648,7 +1811,9 @@ "method": "message_response", "name": "response", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1661,7 +1826,9 @@ "display_name": "Chat History", "dynamic": false, "info": "", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": true, "name": "chat_history", "placeholder": "", @@ -1713,7 +1880,9 @@ "display_name": "Input", 
"dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -1732,7 +1901,9 @@ "display_name": "Language Model", "dynamic": false, "info": "", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -1765,7 +1936,9 @@ "display_name": "System Prompt", "dynamic": false, "info": "System prompt for the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1785,7 +1958,10 @@ "display_name": "Tools", "dynamic": false, "info": "", - "input_types": ["Tool", "BaseTool"], + "input_types": [ + "Tool", + "BaseTool" + ], "list": true, "load_from_db": false, "name": "tools", @@ -1803,7 +1979,9 @@ "display_name": "Prompt", "dynamic": false, "info": "This prompt must contain 'input' key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1856,7 +2034,10 @@ "data": { "id": "ToolCallingAgent-VYDK9", "node": { - "base_classes": ["AgentExecutor", "Message"], + "base_classes": [ + "AgentExecutor", + "Message" + ], "beta": true, "conditional_paths": [], "custom_fields": {}, @@ -1886,7 +2067,9 @@ "method": "build_agent", "name": "agent", "selected": "AgentExecutor", - "types": ["AgentExecutor"], + "types": [ + "AgentExecutor" + ], "value": "__UNDEFINED__" }, { @@ -1895,7 +2078,9 @@ "method": "message_response", "name": "response", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1908,7 +2093,9 @@ "display_name": "Chat History", "dynamic": false, "info": "", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": true, "name": "chat_history", "placeholder": "", @@ -1960,7 +2147,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + 
"Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -1979,7 +2168,9 @@ "display_name": "Language Model", "dynamic": false, "info": "", - "input_types": ["LanguageModel"], + "input_types": [ + "LanguageModel" + ], "list": false, "name": "llm", "placeholder": "", @@ -2012,7 +2203,9 @@ "display_name": "System Prompt", "dynamic": false, "info": "System prompt for the agent.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2032,7 +2225,10 @@ "display_name": "Tools", "dynamic": false, "info": "", - "input_types": ["Tool", "BaseTool"], + "input_types": [ + "Tool", + "BaseTool" + ], "list": true, "load_from_db": false, "name": "tools", @@ -2050,7 +2246,9 @@ "display_name": "Prompt", "dynamic": false, "info": "This prompt must contain 'input' key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2103,7 +2301,12 @@ "data": { "id": "CalculatorTool-5S6u9", "node": { - "base_classes": ["Data", "list", "Sequence", "Tool"], + "base_classes": [ + "Data", + "list", + "Sequence", + "Tool" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2111,7 +2314,9 @@ "display_name": "Calculator", "documentation": "", "edited": false, - "field_order": ["expression"], + "field_order": [ + "expression" + ], "frozen": false, "icon": "calculator", "lf_version": "1.0.15", @@ -2124,9 +2329,13 @@ "display_name": "Data", "method": "run_model", "name": "api_run_model", - "required_inputs": ["expression"], + "required_inputs": [ + "expression" + ], "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -2134,9 +2343,13 @@ "display_name": "Tool", "method": "build_tool", "name": "api_build_tool", - "required_inputs": ["expression"], + "required_inputs": [ + "expression" + ], "selected": "Tool", - "types": ["Tool"], + "types": [ + "Tool" + ], "value": 
"__UNDEFINED__" } ], @@ -2167,7 +2380,9 @@ "display_name": "Expression", "dynamic": false, "info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "expression", diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json index 6868c4554..c2225cdcc 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json @@ -8,12 +8,16 @@ "dataType": "ChatInput", "id": "ChatInput-jvvNM", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "search_input", "id": "AstraVectorStoreComponent-0WT85", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -30,12 +34,17 @@ "dataType": "ParseData", "id": "ParseData-f6Jhl", "name": "text", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "context", "id": "Prompt-oJ8Eh", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -52,12 +61,17 @@ "dataType": "ChatInput", "id": "ChatInput-jvvNM", "name": "message", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "question", "id": "Prompt-oJ8Eh", - "inputTypes": ["Message", "Text"], + "inputTypes": [ + "Message", + "Text" + ], "type": "str" } }, @@ -74,12 +88,16 @@ "dataType": "File", "id": "File-JZzdd", "name": "data", - "output_types": ["Data"] + "output_types": [ + "Data" + ] }, "targetHandle": { "fieldName": "data_inputs", "id": "SplitText-Kl7VQ", - "inputTypes": ["Data"], + "inputTypes": [ + "Data" + ], "type": "other" } }, @@ -96,12 +114,16 @@ "dataType": "SplitText", "id": 
"SplitText-Kl7VQ", "name": "chunks", - "output_types": ["Data"] + "output_types": [ + "Data" + ] }, "targetHandle": { "fieldName": "ingest_data", "id": "AstraVectorStoreComponent-EUhWZ", - "inputTypes": ["Data"], + "inputTypes": [ + "Data" + ], "type": "other" } }, @@ -118,12 +140,16 @@ "dataType": "OpenAIEmbeddings", "id": "OpenAIEmbeddings-Mf9md", "name": "embeddings", - "output_types": ["Embeddings"] + "output_types": [ + "Embeddings" + ] }, "targetHandle": { "fieldName": "embedding", "id": "AstraVectorStoreComponent-EUhWZ", - "inputTypes": ["Embeddings"], + "inputTypes": [ + "Embeddings" + ], "type": "other" } }, @@ -140,12 +166,16 @@ "dataType": "OpenAIEmbeddings", "id": "OpenAIEmbeddings-q2lBh", "name": "embeddings", - "output_types": ["Embeddings"] + "output_types": [ + "Embeddings" + ] }, "targetHandle": { "fieldName": "embedding", "id": "AstraVectorStoreComponent-0WT85", - "inputTypes": ["Embeddings"], + "inputTypes": [ + "Embeddings" + ], "type": "other" } }, @@ -162,12 +192,16 @@ "dataType": "Prompt", "id": "Prompt-oJ8Eh", "name": "prompt", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "OpenAIModel-3v8LQ", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -184,12 +218,16 @@ "dataType": "OpenAIModel", "id": "OpenAIModel-3v8LQ", "name": "text_output", - "output_types": ["Message"] + "output_types": [ + "Message" + ] }, "targetHandle": { "fieldName": "input_value", "id": "ChatOutput-oPZbw", - "inputTypes": ["Message"], + "inputTypes": [ + "Message" + ], "type": "str" } }, @@ -206,12 +244,16 @@ "dataType": "AstraVectorStoreComponent", "id": "AstraVectorStoreComponent-0WT85", "name": "search_results", - "output_types": ["Data"] + "output_types": [ + "Data" + ] }, "targetHandle": { "fieldName": "data", "id": "ParseData-f6Jhl", - "inputTypes": ["Data"], + "inputTypes": [ + "Data" + ], "type": "other" } }, @@ -229,7 +271,9 @@ "display_name": "Chat 
Input", "id": "ChatInput-jvvNM", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -256,7 +300,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -279,7 +325,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n 
display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -327,7 +373,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -347,7 +395,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -361,7 +412,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -379,7 +432,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -433,7 +488,10 @@ "edited": false, "id": "AstraVectorStoreComponent-0WT85", "node": { - "base_classes": ["Data", "Retriever"], + "base_classes": [ + "Data", + "Retriever" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -476,7 +534,9 @@ "name": "base_retriever", "required_inputs": [], "selected": "Retriever", - "types": ["Retriever"], + "types": [ + "Retriever" + ], "value": "__UNDEFINED__" }, { @@ -508,7 +568,9 @@ "token" ], "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -518,7 +580,9 @@ "name": "vector_store", "required_inputs": [], "selected": "VectorStore", - "types": ["VectorStore"], + "types": [ + "VectorStore" + ], "value": "__UNDEFINED__" } ], @@ -530,7 +594,9 @@ "display_name": "API Endpoint", "dynamic": false, "info": "API endpoint URL for the Astra DB service.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_endpoint", "password": true, @@ -656,7 +722,9 @@ "display_name": "Embedding Model", "dynamic": false, "info": "Allows an embedding model configuration.", - "input_types": ["Embeddings"], + "input_types": [ + "Embeddings" + ], "list": false, "name": "embedding", "placeholder": "", @@ -675,7 +743,10 @@ "dynamic": false, "info": "Determines whether to use Astra Vectorize for the collection.", "name": "embedding_service", - "options": ["Embedding Model", "Astra Vectorize"], + "options": [ + "Embedding Model", + "Astra Vectorize" + ], "placeholder": "", "real_time_refresh": true, "required": false, @@ -690,7 +761,9 @@ "display_name": "Ingest Data", "dynamic": false, "info": "", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": true, "name": "ingest_data", "placeholder": "", @@ -740,7 +813,11 @@ "dynamic": false, "info": 
"Optional distance metric for vector comparisons in the vector store.", "name": "metric", - "options": ["cosine", "dot_product", "euclidean"], + "options": [ + "cosine", + "dot_product", + "euclidean" + ], "placeholder": "", "required": false, "show": true, @@ -815,7 +892,9 @@ "display_name": "Search Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -869,7 +948,11 @@ "dynamic": false, "info": "Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.", "name": "setup_mode", - "options": ["Sync", "Async", "Off"], + "options": [ + "Sync", + "Async", + "Off" + ], "placeholder": "", "required": false, "show": true, @@ -883,7 +966,9 @@ "display_name": "Astra DB Application Token", "dynamic": false, "info": "Authentication token for accessing Astra DB.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "token", "password": true, @@ -919,7 +1004,9 @@ "display_name": "Parse Data", "id": "ParseData-f6Jhl", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -927,7 +1014,11 @@ "display_name": "Parse Data", "documentation": "", "edited": false, - "field_order": ["data", "template", "sep"], + "field_order": [ + "data", + "template", + "sep" + ], "frozen": false, "icon": "braces", "metadata": {}, @@ -939,7 +1030,9 @@ "method": "parse_data", "name": "text", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -969,7 +1062,9 @@ "display_name": "Data", "dynamic": false, "info": "The data to convert to text.", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": false, "name": "data", "placeholder": "", @@ -1002,7 +1097,9 @@ "display_name": "Template", "dynamic": false, "info": "The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1041,17 +1138,24 @@ "display_name": "Prompt", "id": "Prompt-oJ8Eh", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": { - "template": ["context", "question"] + "template": [ + "context", + "question" + ] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": ["template"], + "field_order": [ + "template" + ], "frozen": false, "icon": "prompts", "metadata": {}, @@ -1063,7 +1167,9 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1096,7 +1202,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1117,7 +1226,10 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": ["Message", "Text"], + "input_types": [ + "Message", + "Text" + ], "list": false, "load_from_db": false, "multiline": true, @@ -1171,7 +1283,9 @@ "display_name": "Chat Output", "id": "ChatOutput-oPZbw", "node": { - "base_classes": ["Message"], + "base_classes": [ + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1198,7 +1312,9 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" } ], @@ -1221,14 +1337,16 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import 
store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.store_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "data_template", @@ -1246,7 +1364,9 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -1265,7 +1385,10 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": ["Machine", "User"], + "options": [ + "Machine", + "User" + ], "placeholder": "", "required": false, "show": true, @@ -1279,7 +1402,9 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "sender_name", @@ -1297,7 +1422,9 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "session_id", @@ -1350,7 +1477,9 @@ "display_name": "Split Text", "id": "SplitText-Kl7VQ", "node": { - "base_classes": ["Data"], + "base_classes": [ + "Data" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1375,7 +1504,9 @@ "method": "split_text", "name": "chunks", "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" } ], @@ -1435,7 +1566,9 @@ "display_name": "Data Inputs", "dynamic": false, "info": "The data to split.", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": true, "name": "data_inputs", "placeholder": "", @@ -1451,7 +1584,9 @@ "display_name": "Separator", "dynamic": false, "info": "The character to split on. Defaults to newline.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "separator", @@ -1489,7 +1624,9 @@ "display_name": "File", "id": "File-JZzdd", "node": { - "base_classes": ["Data"], + "base_classes": [ + "Data" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1497,7 +1634,10 @@ "display_name": "File", "documentation": "", "edited": false, - "field_order": ["path", "silent_errors"], + "field_order": [ + "path", + "silent_errors" + ], "frozen": false, "icon": "file-text", "metadata": {}, @@ -1509,7 +1649,9 @@ "method": "load_file", "name": "data", "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" } ], @@ -1611,7 +1753,10 @@ "edited": false, "id": "AstraVectorStoreComponent-EUhWZ", "node": { - "base_classes": ["Data", "Retriever"], + "base_classes": [ + "Data", + "Retriever" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -1654,7 +1799,9 @@ "name": "base_retriever", "required_inputs": [], "selected": "Retriever", - "types": ["Retriever"], + "types": [ + 
"Retriever" + ], "value": "__UNDEFINED__" }, { @@ -1686,7 +1833,9 @@ "token" ], "selected": "Data", - "types": ["Data"], + "types": [ + "Data" + ], "value": "__UNDEFINED__" }, { @@ -1696,7 +1845,9 @@ "name": "vector_store", "required_inputs": [], "selected": "VectorStore", - "types": ["VectorStore"], + "types": [ + "VectorStore" + ], "value": "__UNDEFINED__" } ], @@ -1708,7 +1859,9 @@ "display_name": "API Endpoint", "dynamic": false, "info": "API endpoint URL for the Astra DB service.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_endpoint", "password": true, @@ -1834,7 +1987,9 @@ "display_name": "Embedding Model", "dynamic": false, "info": "Allows an embedding model configuration.", - "input_types": ["Embeddings"], + "input_types": [ + "Embeddings" + ], "list": false, "name": "embedding", "placeholder": "", @@ -1853,7 +2008,10 @@ "dynamic": false, "info": "Determines whether to use Astra Vectorize for the collection.", "name": "embedding_service", - "options": ["Embedding Model", "Astra Vectorize"], + "options": [ + "Embedding Model", + "Astra Vectorize" + ], "placeholder": "", "real_time_refresh": true, "required": false, @@ -1868,7 +2026,9 @@ "display_name": "Ingest Data", "dynamic": false, "info": "", - "input_types": ["Data"], + "input_types": [ + "Data" + ], "list": true, "name": "ingest_data", "placeholder": "", @@ -1918,7 +2078,11 @@ "dynamic": false, "info": "Optional distance metric for vector comparisons in the vector store.", "name": "metric", - "options": ["cosine", "dot_product", "euclidean"], + "options": [ + "cosine", + "dot_product", + "euclidean" + ], "placeholder": "", "required": false, "show": true, @@ -1993,7 +2157,9 @@ "display_name": "Search Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "multiline": true, @@ -2047,7 +2213,11 @@ "dynamic": false, "info": "Configuration mode for setting up the 
vector store, with options like 'Sync', 'Async', or 'Off'.", "name": "setup_mode", - "options": ["Sync", "Async", "Off"], + "options": [ + "Sync", + "Async", + "Off" + ], "placeholder": "", "required": false, "show": true, @@ -2061,7 +2231,9 @@ "display_name": "Astra DB Application Token", "dynamic": false, "info": "Authentication token for accessing Astra DB.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "token", "password": true, @@ -2097,7 +2269,9 @@ "display_name": "OpenAI Embeddings", "id": "OpenAIEmbeddings-Mf9md", "node": { - "base_classes": ["Embeddings"], + "base_classes": [ + "Embeddings" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2161,7 +2335,9 @@ "tiktoken_model_name" ], "selected": "Embeddings", - "types": ["Embeddings"], + "types": [ + "Embeddings" + ], "value": "__UNDEFINED__" } ], @@ -2188,7 +2364,9 @@ "display_name": "Client", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "client", @@ -2254,7 +2432,9 @@ "display_name": "Deployment", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "deployment", @@ -2351,7 +2531,9 @@ "display_name": "OpenAI API Base", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "openai_api_base", "password": true, @@ -2367,7 +2549,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "openai_api_key", "password": true, @@ -2383,7 +2567,9 @@ "display_name": "OpenAI API Type", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "openai_api_type", "password": true, @@ -2399,7 +2585,9 @@ "display_name": "OpenAI 
API Version", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "openai_api_version", @@ -2417,7 +2605,9 @@ "display_name": "OpenAI Organization", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "openai_organization", @@ -2435,7 +2625,9 @@ "display_name": "OpenAI Proxy", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "openai_proxy", @@ -2513,7 +2705,9 @@ "display_name": "TikToken Model Name", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "tiktoken_model_name", @@ -2551,7 +2745,9 @@ "display_name": "OpenAI Embeddings", "id": "OpenAIEmbeddings-q2lBh", "node": { - "base_classes": ["Embeddings"], + "base_classes": [ + "Embeddings" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2615,7 +2811,9 @@ "tiktoken_model_name" ], "selected": "Embeddings", - "types": ["Embeddings"], + "types": [ + "Embeddings" + ], "value": "__UNDEFINED__" } ], @@ -2642,7 +2840,9 @@ "display_name": "Client", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "client", @@ -2708,7 +2908,9 @@ "display_name": "Deployment", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "deployment", @@ -2805,7 +3007,9 @@ "display_name": "OpenAI API Base", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "openai_api_base", "password": true, @@ -2821,7 +3025,9 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "", - "input_types": ["Message"], + 
"input_types": [ + "Message" + ], "load_from_db": true, "name": "openai_api_key", "password": true, @@ -2837,7 +3043,9 @@ "display_name": "OpenAI API Type", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "openai_api_type", "password": true, @@ -2853,7 +3061,9 @@ "display_name": "OpenAI API Version", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "openai_api_version", @@ -2871,7 +3081,9 @@ "display_name": "OpenAI Organization", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "openai_organization", @@ -2889,7 +3101,9 @@ "display_name": "OpenAI Proxy", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "openai_proxy", @@ -2967,7 +3181,9 @@ "display_name": "TikToken Model Name", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "tiktoken_model_name", @@ -3005,7 +3221,10 @@ "display_name": "OpenAI", "id": "OpenAIModel-3v8LQ", "node": { - "base_classes": ["LanguageModel", "Message"], + "base_classes": [ + "LanguageModel", + "Message" + ], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -3037,9 +3256,15 @@ "display_name": "Text", "method": "text_response", "name": "text_output", - "required_inputs": ["input_value", "stream", "system_message"], + "required_inputs": [ + "input_value", + "stream", + "system_message" + ], "selected": "Message", - "types": ["Message"], + "types": [ + "Message" + ], "value": "__UNDEFINED__" }, { @@ -3059,7 +3284,9 @@ "temperature" ], "selected": "LanguageModel", - "types": ["LanguageModel"], + "types": [ + "LanguageModel" + ], "value": "__UNDEFINED__" } ], @@ -3071,7 +3298,9 @@ 
"display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "load_from_db": true, "name": "api_key", "password": true, @@ -3105,7 +3334,9 @@ "display_name": "Input", "dynamic": false, "info": "", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "input_value", @@ -3209,7 +3440,9 @@ "display_name": "Output Parser", "dynamic": false, "info": "The parser to use to parse the output of the model", - "input_types": ["OutputParser"], + "input_types": [ + "OutputParser" + ], "list": false, "name": "output_parser", "placeholder": "", @@ -3270,7 +3503,9 @@ "display_name": "System Message", "dynamic": false, "info": "System message to pass to the model.", - "input_types": ["Message"], + "input_types": [ + "Message" + ], "list": false, "load_from_db": false, "name": "system_message", diff --git a/src/backend/base/langflow/memory.py b/src/backend/base/langflow/memory.py index f9600128d..72d81e16c 100644 --- a/src/backend/base/langflow/memory.py +++ b/src/backend/base/langflow/memory.py @@ -36,7 +36,7 @@ def get_messages( List[Data]: A list of Data objects representing the retrieved messages. 
""" with session_scope() as session: - stmt = select(MessageTable) + stmt = select(MessageTable).where(MessageTable.error == False) # noqa: E712 if sender: stmt = stmt.where(MessageTable.sender == sender) if sender_name: @@ -142,7 +142,7 @@ class LCBuiltinChatMemory(BaseChatMessageHistory): messages = get_messages( session_id=self.session_id, ) - return [m.to_lc_message() for m in messages] + return [m.to_lc_message() for m in messages if not m.error] # Exclude error messages def add_messages(self, messages: Sequence[BaseMessage]) -> None: for lc_message in messages: diff --git a/src/backend/base/langflow/schema/message.py b/src/backend/base/langflow/schema/message.py index 52587b0c9..6d9e398ad 100644 --- a/src/backend/base/langflow/schema/message.py +++ b/src/backend/base/langflow/schema/message.py @@ -54,6 +54,8 @@ class Message(Data): default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S") ) flow_id: str | UUID | None = None + error: bool = Field(default=False) + edit: bool = Field(default=False) @field_validator("flow_id", mode="before") @classmethod @@ -64,10 +66,14 @@ class Message(Data): @field_serializer("flow_id") def serialize_flow_id(value): - if isinstance(value, str): - return UUID(value) + if isinstance(value, UUID): + return str(value) return value + @field_serializer("timestamp") + def serialize_timestamp(value): + return datetime.strptime(value, "%Y-%m-%d %H:%M:%S").astimezone(timezone.utc) + @field_validator("files", mode="before") @classmethod def validate_files(cls, value): @@ -154,6 +160,8 @@ class Message(Data): session_id=data.session_id, timestamp=data.timestamp, flow_id=data.flow_id, + error=data.error, + edit=data.edit, ) @field_serializer("text", mode="plain") diff --git a/src/backend/base/langflow/services/database/models/message/crud.py b/src/backend/base/langflow/services/database/models/message/crud.py index 9b408da80..840d015ba 100644 --- a/src/backend/base/langflow/services/database/models/message/crud.py 
+++ b/src/backend/base/langflow/services/database/models/message/crud.py @@ -4,7 +4,7 @@ from langflow.services.database.models.message.model import MessageTable, Messag from langflow.services.deps import session_scope -def update_message(message_id: UUID, message: MessageUpdate | dict): +def update_message(message_id: UUID | str, message: MessageUpdate | dict): if not isinstance(message, MessageUpdate): message = MessageUpdate(**message) with session_scope() as session: diff --git a/src/backend/base/langflow/services/database/models/message/model.py b/src/backend/base/langflow/services/database/models/message/model.py index 52237b985..89d26bab6 100644 --- a/src/backend/base/langflow/services/database/models/message/model.py +++ b/src/backend/base/langflow/services/database/models/message/model.py @@ -18,6 +18,8 @@ class MessageBase(SQLModel): session_id: str text: str = Field(sa_column=Column(Text)) files: list[str] = Field(default_factory=list) + error: bool = Field(default=False) + edit: bool = Field(default=False) @field_validator("files", mode="before") @classmethod @@ -100,3 +102,5 @@ class MessageUpdate(SQLModel): sender_name: str | None = None session_id: str | None = None files: list[str] | None = None + edit: bool | None = None + error: bool | None = None diff --git a/src/frontend/src/components/renderIconComponent/index.tsx b/src/frontend/src/components/renderIconComponent/index.tsx index d64426b40..60ba498a7 100644 --- a/src/frontend/src/components/renderIconComponent/index.tsx +++ b/src/frontend/src/components/renderIconComponent/index.tsx @@ -3,7 +3,7 @@ import { addPlusSignes, cn, sortShortcuts } from "@/utils/utils"; import RenderKey from "./components/renderKey"; export default function RenderIcons({ - filteredShortcut, + filteredShortcut = [], tableRender = false, }: { filteredShortcut: string[]; diff --git a/src/frontend/src/controllers/API/queries/messages/use-get-messages.ts 
b/src/frontend/src/controllers/API/queries/messages/use-get-messages.ts index 380d22ed7..c0be11627 100644 --- a/src/frontend/src/controllers/API/queries/messages/use-get-messages.ts +++ b/src/frontend/src/controllers/API/queries/messages/use-get-messages.ts @@ -40,7 +40,6 @@ export const useGetMessagesQuery: useQueryFunctionType< const data = await getMessagesFn(id, params); const columns = extractColumnsFromRows(data.data, mode, excludedFields); useMessagesStore.getState().setMessages(data.data); - useMessagesStore.getState().setColumns(columns); return { rows: data, columns }; }; diff --git a/src/frontend/src/controllers/API/queries/messages/use-put-update-messages.ts b/src/frontend/src/controllers/API/queries/messages/use-put-update-messages.ts index 9f931e35a..1ea24cda5 100644 --- a/src/frontend/src/controllers/API/queries/messages/use-put-update-messages.ts +++ b/src/frontend/src/controllers/API/queries/messages/use-put-update-messages.ts @@ -1,3 +1,4 @@ +import useFlowsManagerStore from "@/stores/flowsManagerStore"; import { useMutationFunctionType } from "@/types/api"; import { Message } from "@/types/messages"; import { UseMutationResult } from "@tanstack/react-query"; @@ -6,28 +7,45 @@ import { getURL } from "../../helpers/constants"; import { UseRequestProcessor } from "../../services/request-processor"; interface UpdateMessageParams { - message: Message; + message: Partial; + refetch?: boolean; } export const useUpdateMessage: useMutationFunctionType< undefined, UpdateMessageParams > = (options?) 
=> { - const { mutate } = UseRequestProcessor(); + const { mutate, queryClient } = UseRequestProcessor(); - const updateMessageApi = async (data: Message) => { - if (data.files && typeof data.files === "string") { - data.files = JSON.parse(data.files); + const updateMessageApi = async (data: UpdateMessageParams) => { + const message = data.message; + if (message.files && typeof message.files === "string") { + message.files = JSON.parse(message.files); } - const result = await api.put(`${getURL("MESSAGES")}/${data.id}`, data); + const result = await api.put( + `${getURL("MESSAGES")}/${message.id}`, + message, + ); return result.data; }; - const mutation: UseMutationResult< - UpdateMessageParams, - any, - UpdateMessageParams - > = mutate(["useUpdateMessages"], updateMessageApi, options); + const mutation: UseMutationResult = mutate( + ["useUpdateMessages"], + updateMessageApi, + { + ...options, + onSettled: (_, __, params, ___) => { + const flowId = useFlowsManagerStore.getState().currentFlowId; + //@ts-ignore + if (params?.refetch && flowId) { + queryClient.refetchQueries({ + queryKey: ["useGetMessagesQuery", { id: flowId }], + exact: true, + }); + } + }, + }, + ); return mutation; }; diff --git a/src/frontend/src/controllers/API/queries/messages/use-rename-session.ts b/src/frontend/src/controllers/API/queries/messages/use-rename-session.ts new file mode 100644 index 000000000..51f088eda --- /dev/null +++ b/src/frontend/src/controllers/API/queries/messages/use-rename-session.ts @@ -0,0 +1,42 @@ +import { useMutationFunctionType } from "@/types/api"; +import { Message } from "@/types/messages"; +import { UseMutationResult } from "@tanstack/react-query"; +import { api } from "../../api"; +import { getURL } from "../../helpers/constants"; +import { UseRequestProcessor } from "../../services/request-processor"; + +interface UpdateSessionParams { + old_session_id: string; + new_session_id: string; +} + +export const useUpdateSessionName: useMutationFunctionType< + 
undefined, + UpdateSessionParams +> = (options?) => { + const { mutate, queryClient } = UseRequestProcessor(); + + const updateSessionApi = async (data: UpdateSessionParams) => { + const result = await api.patch( + `${getURL("MESSAGES")}/session/${data.old_session_id}`, + null, + { + params: { new_session_id: data.new_session_id }, + }, + ); + return result.data; + }; + + const mutation: UseMutationResult = + mutate(["useUpdateSessionName"], updateSessionApi, { + ...options, + onSettled: (data, variables, context) => { + // Invalidate and refetch relevant queries + queryClient.refetchQueries({ + queryKey: ["useGetMessagesQuery"], + }); + }, + }); + + return mutation; +}; diff --git a/src/frontend/src/modals/IOModal/components/IOFieldView/components/sessionSelector/index.tsx b/src/frontend/src/modals/IOModal/components/IOFieldView/components/sessionSelector/index.tsx new file mode 100644 index 000000000..0e489737b --- /dev/null +++ b/src/frontend/src/modals/IOModal/components/IOFieldView/components/sessionSelector/index.tsx @@ -0,0 +1,214 @@ +import IconComponent from "@/components/genericIconComponent"; +import ShadTooltip from "@/components/shadTooltipComponent"; +import { Badge } from "@/components/ui/badge"; +import { Input } from "@/components/ui/input"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, +} from "@/components/ui/select-custom"; +import { useUpdateSessionName } from "@/controllers/API/queries/messages/use-rename-session"; +import useFlowStore from "@/stores/flowStore"; +import { cn } from "@/utils/utils"; +import React, { useEffect, useRef, useState } from "react"; + +export default function SessionSelector({ + deleteSession, + session, + toggleVisibility, + isVisible, + inspectSession, + updateVisibleSession, + selectedView, + setSelectedView, +}: { + deleteSession: (session: string) => void; + session: string; + toggleVisibility: () => void; + isVisible: boolean; + inspectSession: (session: string) => void; + 
updateVisibleSession: (session: string) => void; + selectedView?: { type: string; id: string }; + setSelectedView: (view: { type: string; id: string } | undefined) => void; +}) { + const currentFlowId = useFlowStore((state) => state.currentFlow?.id); + const [isEditing, setIsEditing] = useState(false); + const [editedSession, setEditedSession] = useState(session); + const { mutate: updateSessionName } = useUpdateSessionName(); + const inputRef = useRef(null); + + useEffect(() => { + setEditedSession(session); + }, [session]); + + const handleEditClick = (e?: React.MouseEvent) => { + e?.stopPropagation(); + setIsEditing(true); + }; + + const handleInputChange = (e: React.ChangeEvent) => { + setEditedSession(e.target.value); + }; + + const handleConfirm = () => { + setIsEditing(false); + if (editedSession.trim() !== session) { + updateSessionName( + { old_session_id: session, new_session_id: editedSession.trim() }, + { + onSuccess: () => { + if (isVisible) { + updateVisibleSession(editedSession); + } + if ( + selectedView?.type === "Session" && + selectedView?.id === session + ) { + setSelectedView({ type: "Session", id: editedSession }); + } + }, + }, + ); + } + }; + + const handleCancel = () => { + setIsEditing(false); + setEditedSession(session); + }; + + const handleSelectChange = (value: string) => { + switch (value) { + case "rename": + handleEditClick(); + break; + case "messageLogs": + inspectSession(session); + break; + case "delete": + deleteSession(session); + break; + } + }; + + return ( +
{ + if (isEditing) e.stopPropagation(); + else toggleVisibility(); + }} + className={cn( + "file-component-accordion-div group cursor-pointer rounded-md hover:bg-muted-foreground/30", + isVisible ? "bg-muted-foreground/15" : "", + )} + > +
+
+ {isEditing ? ( +
+ { + if (e.key === "Enter") { + e.preventDefault(); + e.stopPropagation(); + handleConfirm(); + } + }} + onChange={handleInputChange} + onBlur={(e) => { + console.log(e.relatedTarget); + if ( + !e.relatedTarget || + e.relatedTarget.getAttribute("data-confirm") !== "true" + ) { + handleCancel(); + } + }} + autoFocus + className="h-6 flex-grow px-1 py-0" + /> + + +
+ ) : ( + +
+ + {session === currentFlowId ? "Default Session" : session} + +
+
+ )} +
+ +
+
+ ); +} diff --git a/src/frontend/src/modals/IOModal/components/SessionView/index.tsx b/src/frontend/src/modals/IOModal/components/SessionView/index.tsx index 9dc300282..e3212b690 100644 --- a/src/frontend/src/modals/IOModal/components/SessionView/index.tsx +++ b/src/frontend/src/modals/IOModal/components/SessionView/index.tsx @@ -10,7 +10,10 @@ import { useMemo, useState } from "react"; import TableComponent from "../../../../components/tableComponent"; import useAlertStore from "../../../../stores/alertStore"; import { useMessagesStore } from "../../../../stores/messagesStore"; -import { messagesSorter } from "../../../../utils/utils"; +import { + extractColumnsFromRows, + messagesSorter, +} from "../../../../utils/utils"; export default function SessionView({ session, @@ -19,12 +22,12 @@ export default function SessionView({ session?: string; id?: string; }) { - const columns = useMessagesStore((state) => state.columns); const messages = useMessagesStore((state) => state.messages); const setErrorData = useAlertStore((state) => state.setErrorData); const setSuccessData = useAlertStore((state) => state.setSuccessData); const updateMessage = useMessagesStore((state) => state.updateMessage); const deleteMessagesStore = useMessagesStore((state) => state.removeMessages); + const columns = extractColumnsFromRows(messages, "intersection"); const isFetching = useIsFetching({ queryKey: ["useGetMessagesQuery"], exact: false, @@ -56,22 +59,25 @@ export default function SessionView({ ...row, [field]: newValue, }; - updateMessageMutation(data, { - onSuccess: () => { - updateMessage(data); - // Set success message - setSuccessData({ - title: "Messages updated successfully.", - }); + updateMessageMutation( + { message: data }, + { + onSuccess: () => { + updateMessage(data); + // Set success message + setSuccessData({ + title: "Messages updated successfully.", + }); + }, + onError: () => { + setErrorData({ + title: "Error updating messages.", + }); + event.data[field] = 
event.oldValue; + event.api.refreshCells(); + }, }, - onError: () => { - setErrorData({ - title: "Error updating messages.", - }); - event.data[field] = event.oldValue; - event.api.refreshCells(); - }, - }); + ); } const filteredMessages = useMemo(() => { diff --git a/src/frontend/src/modals/IOModal/components/chatView/chatInput/components/uploadFileButton/index.tsx b/src/frontend/src/modals/IOModal/components/chatView/chatInput/components/uploadFileButton/index.tsx index 8876c4517..d91ef8bad 100644 --- a/src/frontend/src/modals/IOModal/components/chatView/chatInput/components/uploadFileButton/index.tsx +++ b/src/frontend/src/modals/IOModal/components/chatView/chatInput/components/uploadFileButton/index.tsx @@ -18,7 +18,7 @@ const UploadFileButton = ({ /> + ); +} diff --git a/src/frontend/src/modals/IOModal/components/chatView/chatMessage/components/editMessageField/index.tsx b/src/frontend/src/modals/IOModal/components/chatView/chatMessage/components/editMessageField/index.tsx new file mode 100644 index 000000000..1b257668c --- /dev/null +++ b/src/frontend/src/modals/IOModal/components/chatView/chatMessage/components/editMessageField/index.tsx @@ -0,0 +1,75 @@ +import { Button } from "@/components/ui/button"; +import { Textarea } from "@/components/ui/textarea"; +import { useEffect, useRef, useState } from "react"; + +export default function EditMessageField({ + message: initialMessage, + onEdit, + onCancel, +}: { + message: string; + onEdit: (message: string) => void; + onCancel: () => void; +}) { + const [message, setMessage] = useState(initialMessage); + const textareaRef = useRef(null); + const [isButtonClicked, setIsButtonClicked] = useState(false); + const adjustTextareaHeight = () => { + if (textareaRef.current) { + textareaRef.current.style.height = "auto"; + textareaRef.current.style.height = `${textareaRef.current.scrollHeight + 3}px`; + } + }; + useEffect(() => { + adjustTextareaHeight(); + }, []); + + return ( +
+