feat: adds file management feature under feature flag, adds file dialog, adds files page (#6134)

* Create file management feature flag

* Added file manager modal

* Open file manager modal on clicking button

* Added file dragger

* Changed drag files component to be like the design

* Optimize code

* Implemented Import Files component

* Implemented Recent Files component

* Added Import Files and Recent Files into File Manager modal and added necessary icons

* Added file rendering to be used on component

* Added rendering of files selected on component

* Added icons for Dropbox, GoogleDrive and Onedrive

* Added dropdown menu for selecting import source

* Added button to navigate to My Files

* Added Files page, with Import button and search

* Added files table

* Updated uv and package lock

* Added tooltip for file types

* Added cursor pointer to file dragger

* Added file options dropdown

* Added files context menu to files page

* Changed side of dropdown

* Implemented search on modal

* Added Inverted AWS icon

* Added Import Button as morphed button

* Modularized Morphing Menu

* Updated Files Renderer height

* Added File endpoint and types

* Added Download File mutation

* Added Get Files query

* Added Rename File mutation

* Added Upload File mutation

* Added Use Upload File hook

* Added drop of files on dragFilesComponent

* Changed upload and rename to refetch get

* Added delete endpoint

* Changed get files to return correctly

* Make FilesRendererComponent render with correct file type

* Get files from correct endpoint

* Exported sort by date

* Added file_path to input file component

* Added file_path to File Component Type

* Implement showing correct files in File component and removing and adding selected files

* Removed open

* Changed selected files to handle ID

* Handled sorting of recent results

* Added sort By Boolean aux function

* Updated to use path for selected files, and to not update the selected values when Files change

* Change delete to handle id on params

* Change upload file to return files IDs

* Added duplicate file

* Added void to download files without params

* Select uploaded files

* Added onUpload to select uploaded files

* Added context menu options

* Changed to handle selected files with path

* Changed recentFiles to handle selected files with path

* Changed FilesContextMenu params

* Turned updatedAt optional

* Added files to the Files Page

* Changed file icons

* used size as number and get type from path

* Added correct CardsWrapComponent to home page

* Fixed flows and components drop in home and empty pages

* Fixed files drop in files management

* Implemented types validation when uploading via modal

* Fixed types for files page(allow all)

* Filter files on modal by file types

* Implemented multiple files handling when list parameter is true on FileInput

* Hidden unused import feature

* Added value correction if component values are incongruent

* Fixed size validator

* Add new size validator to use-upload-file

* Changed icon for rename

* removed replace function

* Implemented renaming file

* Added v2 api support

* Added v2 api on file_management

* Changed duplicate and download to include file type

* Implemented renaming on clicking rename option on files page

* Implemented rename on modal

* Implemented empty state for search and no files

* Changed text of empty state

* Added variants to morphing menu

* Fixed design of import from button and file search

* Implemented modal height changes to keep initial height

* Replace rename icon

* Removed duplicate from modal

* Added min height

* Fixed height

* Delete unused state

* Fixed size of import from button

* Added successful upload toast

* Implement plus button when files are selected

* implemented progress on uploads

* Added error handling to uploaded file

* Added delete confirmation

* Added maxFileSizeUpload to the dialog

* Implement file upload retry

* Readded import from to files

* Added empty state to file browser

* Added list on base file

* [autofix.ci] apply automated fixes

* Try to fix crash

* Add check on files page

* Added DragWrapComponent with dragging area and opacity

* Added DragWrapComponent to files page

* [autofix.ci] apply automated fixes

* Added test ids for making tests for files page

* Added example files for tests

* Added boolean on await bootstrap to not open modal

* Added files page tests

* Added the extension on the file names

* Added datatestid and made context menu close on delete

* Added search with types

* Add type to filetype

* Added datatestid

* Changed formatFileSize to show B instead of bytes

* Updated files page test to use random names

* Changed file upload test to test file management functionality

* Modularized generate filename

* Fixed files page test

* Added ignore to pyproject

* Don't retry and don't clear

* Enable file management feature flag

* Remove import from button

* Refetch files on open of dialog

* Added upload failed state to my files

* Made not upload extensionless files

* Add toast on delete file

* removed standalone upload button

* Increased padding

* Updated row hover color

* Update padding of component

* Fixed dash lines

* Fixed api routes on vite config

* added api v2 endpoints to docker compose

* Adds router v2 and v1 to parent router

* Adds trailing slash to file management endpoint

* Fixed dash array

* Readded px-5

* Refactor dispatch methods and cleanup code for improved readability in middleware and app setup

* [autofix.ci] apply automated fixes

* Feat: add a support for OpenSearch and AstraDB components to yield the langchain vector_store connection object (#6998)

* Added decorator, decorator test, and modified supported vector stores

* Renamed module file name to reflect that this is for generic use, not use for graph rag

* Updated docsstring

* Improved documentation and modification to UT to support graph rag

* Remove extra file from PR

* rollback vector store template

* [autofix.ci] apply automated fixes

---------

Co-authored-by: Nadir J <31660040+NadirJ@users.noreply.github.com>
Co-authored-by: cristhianzl <cristhian.lousa@gmail.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>

* chore: Bump frontend package version and enhance test reliability (#7049)

* chore: bump frontend package version from 0.1.2 to 1.2.0 in package-lock.json

* test: enhance error message wait condition in generalBugs-shard-6.spec.ts

Updated the test to wait for the error message to appear with a minimum length of 20 characters, improving reliability in detecting error states. This change replaces the previous fixed timeout with a dynamic check, enhancing the robustness of the test.

* fix: Disable retries in usePostValidateComponentCode mutation (#7044)

♻️ (use-post-validate-component-code.ts): refactor usePostValidateComponentCode function to include retry and retryDelay options for better control over mutation behavior

Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>

* feat: Add updateHiddenOutputs helper function to manage output visibility (#6932)

 (update-hidden-outputs.ts): add a new helper function to update hidden outputs in the frontend CustomNodes module
♻️ (use-update-all-nodes.ts): refactor useUpdateAllNodes hook to update hidden outputs for all nodes in the frontend CustomNodes module
♻️ (use-update-node-code.ts): refactor useUpdateNodeCode hook to update hidden outputs for a specific node's code in the frontend CustomNodes module
♻️ (index.ts): refactor types in the flow module to include OutputFieldType for better type checking and consistency

* docs: fix syntax errors at build (#7047)

* docs-fix-linking-errors

* docs-fix-codehike-errors

* add-mit-license-field-to-package-json

* add-tailwind-config-file

---------

Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>

* chore: update test durations (#6975)

Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com>

* docs: revised README (#7052)

* revised README

* fixed links

* Update README.md

Co-authored-by: Mendon Kissling <59585235+mendonk@users.noreply.github.com>

* Update README.md

Co-authored-by: Mendon Kissling <59585235+mendonk@users.noreply.github.com>

* reverting

---------

Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
Co-authored-by: Mendon Kissling <59585235+mendonk@users.noreply.github.com>

* test: Parameterize DeepSeek model component test correctly (#7019)

test: Parameterize DeepSeek model component test with temperature and max tokens

* fix: docker test trigger for poetry is wrong now is uv (#6743)

also delete useless actions yml for now

Signed-off-by: yihong0618 <zouzou0208@gmail.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>

* docs: Pull request draft workflow (#7046)

Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>

* feat: Sync flows from FS to DB if flow has fs_path (#7043)

* feat: Sync flows from FS to DB if flow has fs_path

* Changes following review

* Simplify flow_mtimes handling

* Move sync_flows_from_fs to setup.py

---------

Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>

* ci: fix false positive on ci success status (#6868)

ci: fix ci EXIT_CODE

* docs: api build and run examples update (#6904)

* run-endpoint-parameters

* docs: Update build flow API documentation with detailed examples and parameters

* table-cleanup

* cleanup

* Apply suggestions from code review

Co-authored-by: KimberlyFields <46325568+KimberlyFields@users.noreply.github.com>

* Apply suggestions from code review

* comments-from-code-review

* Update docs/docs/API-Reference/api-reference-api-examples.md

Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>

---------

Co-authored-by: KimberlyFields <46325568+KimberlyFields@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>

* build(deps): bump @babel/runtime-corejs3 from 7.26.9 to 7.26.10 in /docs (#7051)

Bumps [@babel/runtime-corejs3](https://github.com/babel/babel/tree/HEAD/packages/babel-runtime-corejs3) from 7.26.9 to 7.26.10.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.26.10/packages/babel-runtime-corejs3)

---
updated-dependencies:
- dependency-name: "@babel/runtime-corejs3"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* feat: add a unified language model component. (#6994)

* add a unified language model component with a few providers

* [autofix.ci] apply automated fixes

* fix errors and add tests

* [autofix.ci] apply automated fixes

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Edwin Jose <edwin.jose@datastax.com>

* fix: Rename AgentQL components and add prompt parameter (#6834)

* renamed components

* add prompt parameter

* [autofix.ci] apply automated fixes

* prevent both query and prompt

* ruff checks

* [autofix.ci] apply automated fixes

* amend conditional check

* change error message

* update templates

* [autofix.ci] apply automated fixes

* fix tags

* fix tags in news aggregator

---------

Co-authored-by: huwenjie912 <huwenjie912@gmail.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Edwin Jose <edwin.jose@datastax.com>

* ref: Refactor tracing service (#7011)

* Refactor tracing service

* Remove start, flush and stop

* fix: pass props to SvgAnthropicBox component (#7057)

* fix: pass props to SvgAnthropicBox component

* fix: pass props to SvgAnthropicBox component in light mode

---------

Co-authored-by: Edwin Jose <edwin.jose@datastax.com>

* fix: check if component is in tool mode as well to display Tool Mode switch (#7042)

feat: Enhance checkHasToolMode function to include tool mode detection

Updated the checkHasToolMode function to account for an additional condition where the template is considered to be in tool mode if it contains exactly three fields: _type, code, and tools_metadata. This improves the function's ability to accurately determine the tool mode status of a template.

* Fix: Text split issues related to separator (#6993)

* fixes text split issues related to separator

* [autofix.ci] apply automated fixes

* [autofix.ci] apply automated fixes (attempt 2/3)

* format error fix

* Update Vector Store RAG.json

* [autofix.ci] apply automated fixes

* 📝 (freeze.spec.ts): update test description to match the actual element being tested for better clarity and maintainability

*  (stop-building.spec.ts): update test description to improve clarity and maintainability
 (stop-button-playground.spec.ts): add wait time before filling search input to ensure proper loading and interaction with the element

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Edwin Jose <edwin.jose@datastax.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
Co-authored-by: cristhianzl <cristhian.lousa@gmail.com>
Co-authored-by: Ítalo Johnny <italojohnnydosanjos@gmail.com>

* docs: mcp integration (#6986)

* docs: Add MCP (Model Context Protocol) integration documentation

* docs: Update Astra DB MCP integration documentation

* docs: Update Astra DB MCP integration documentation with Cursor connection section

* docs: Update MCP integration guide with Datastax Astra DB connection details and prerequisites

* Apply suggestions from code review

Co-authored-by: KimberlyFields <46325568+KimberlyFields@users.noreply.github.com>

* docs-peer-reviews

* remove-cursor-integration

* code-review

* Apply suggestions from code review

Co-authored-by: April I. Murphy <36110273+aimurphy@users.noreply.github.com>

* docs: update MCP integration guide for clarity and consistency

---------

Co-authored-by: KimberlyFields <46325568+KimberlyFields@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
Co-authored-by: April I. Murphy <36110273+aimurphy@users.noreply.github.com>

* feat: add regex pattern extractor component (#6015)

* feat: add regex pattern extractor component

* [autofix.ci] apply automated fixes

* fix: consistent schema and cleaner code style

* fix: type annotation in regex.py

* [autofix.ci] apply automated fixes

* Fix: regex component unit tests to match implementation behavior

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Ítalo Johnny <italojohnnydosanjos@gmail.com>

* docs: Add workflow to automate updates to docs/openapi.json (#7072)

Co-authored-by: Mendon Kissling <59585235+mendonk@users.noreply.github.com>

* docs: test and update google oauth integration (#6949)

* update-template

* update-integration-doc

* document-id

* Apply suggestions from code review

Co-authored-by: brian-f <brian.fisher@datastax.com>

---------

Co-authored-by: brian-f <brian.fisher@datastax.com>

* feat: add pokedex agent template (#6885)

* add-pokedex-agent-json

* update with the latest agent component

* Update Pokédex Agent.json

---------

Co-authored-by: Edwin Jose <edwin.jose@datastax.com>

* feat: apify starter template (#6784)

* add social media agent apify template

* example default values, update starter template

* revert package-lock.json

* format

* note-cleanup

* [autofix.ci] apply automated fixes

* updates to the components

* update the agent component

---------

Co-authored-by: Edwin Jose <edwin.jose@datastax.com>
Co-authored-by: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>

* feat: Needle Search Tool With Template (#6648)

* feat: Needle Search Tool With Template

* lint

* lint

* lint

* lint

* refactor: Use Langflow Agent instead of CrewAI Agent

* techdebt: adjust Needle component to use tool mode and remove tool component

* lint

* lint

* Update Invoice Summarizer.json

* Update Invoice Summarizer.json

* update to the component

* refactor: Use Needle icon svg

* make format

* component updates

* update with latest agent component

* updated a missing connection when updating the agent component

* update template

---------

Co-authored-by: Edwin Jose <edwin.jose@datastax.com>

* feat: New parser component with multiple input types and stringify add on (#6652)

* update to parser

* error handling

* solve lint error and added tests

* [autofix.ci] apply automated fixes

* [autofix.ci] apply automated fixes

* Update parser.py

* fix format errors

* [autofix.ci] apply automated fixes

* refactor: Remove hardcoded name attribute from ParserComponent

* Update src/backend/base/langflow/components/processing/parser.py

Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>

* error fix

* [autofix.ci] apply automated fixes

* feat: mark ParserComponent as beta

Added a beta flag to the ParserComponent to indicate its experimental status.

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
Co-authored-by: Ítalo Johnny <italojohnnydosanjos@gmail.com>
Co-authored-by: Rodrigo <rodrigosilvanader@gmail.com>

* [autofix.ci] apply automated fixes

* format

* remove console log

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* add space in docker compose

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* [autofix.ci] apply automated fixes

* Update package lock

* Removed unused file

* Fixed naming on handleChangeFiles

* Update src/frontend/src/pages/MainPage/pages/filesPage/index.tsx

Co-authored-by: Mike Fortman <michael.fortman@datastax.com>

* Used format file size

* updated file browser to My Files

* Updated cursor

* Added error on selecting no files

* Updated base FileInput name to Files

* Added truncate and tooltip to files

* [autofix.ci] apply automated fixes

* [autofix.ci] apply automated fixes (attempt 2/3)

* merge fix

* Update src/frontend/src/components/core/parameterRenderComponent/components/inputFileComponent/index.tsx

* Update src/frontend/src/modals/fileManagerModal/components/filesContextMenuComponent/index.tsx

* [autofix.ci] apply automated fixes

* [autofix.ci] apply automated fixes (attempt 2/3)

* [autofix.ci] apply automated fixes (attempt 3/3)

* Fixed files not maintaining state

* Update info position

* adjusted getCustomParameterTitle

* removed console.log

* updated utils.py

* Added upload file util

* Added check on upload-file to use old one if unavailable

* Use new upload file function in Document QA

* uploaded tests to use new UploadFile util

* updated general bugs to use new uploadFile

* updated upload-file

* Update file mixin to have temp file

* Update file component to not enable file management if temp file is true

* Put temp file as true for chat input

* Update starter projects

* Updated starter templates

* added tempFile condition on useEffect

* Fixed invalid size alert

* Fixed limit file test

* Fixed backend test

---------

Signed-off-by: yihong0618 <zouzou0208@gmail.com>
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
Co-authored-by: Pedro Pacheco <3083335+pedrocassalpacheco@users.noreply.github.com>
Co-authored-by: Nadir J <31660040+NadirJ@users.noreply.github.com>
Co-authored-by: cristhianzl <cristhian.lousa@gmail.com>
Co-authored-by: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com>
Co-authored-by: Carter Rabasa <carter.rabasa@gmail.com>
Co-authored-by: yihong <zouzou0208@gmail.com>
Co-authored-by: Ronnie Miller <ronnie.miller@datastax.com>
Co-authored-by: Christophe Bornet <cbornet@hotmail.com>
Co-authored-by: Ítalo Johnny <italojohnnydosanjos@gmail.com>
Co-authored-by: KimberlyFields <46325568+KimberlyFields@users.noreply.github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Rodrigo Nader <rodrigosilvanader@gmail.com>
Co-authored-by: Edwin Jose <edwin.jose@datastax.com>
Co-authored-by: wjwjtf <wenjie@tinyfish.io>
Co-authored-by: huwenjie912 <huwenjie912@gmail.com>
Co-authored-by: Eddie Ho <62191480+noodleslove@users.noreply.github.com>
Co-authored-by: April I. Murphy <36110273+aimurphy@users.noreply.github.com>
Co-authored-by: Raphael Valdetaro <79842132+raphaelchristi@users.noreply.github.com>
Co-authored-by: brian-f <brian.fisher@datastax.com>
Co-authored-by: Jakub Kopecký <me@kopecky.io>
Co-authored-by: Jan Heimes <45521680+JANHMS@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Mike Fortman <michael.fortman@datastax.com>
This commit is contained in:
Lucas Oliveira 2025-03-17 23:32:55 -03:00 committed by GitHub
commit 093cc42f38
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
115 changed files with 5454 additions and 7051 deletions

View file

@ -42,7 +42,7 @@ services:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/api/v2`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860
frontend:
@ -56,7 +56,7 @@ services:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-celeryworker-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.routers.${STACK_NAME?Variable not set}-celeryworker-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/api/v2`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.services.${STACK_NAME?Variable not set}-celeryworker.loadbalancer.server.port=7860
networks:

View file

@ -80,7 +80,7 @@ services:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/api/v2`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860
db:

View file

@ -253,7 +253,7 @@ directory = "coverage"
[tool.ruff]
exclude = ["src/backend/base/langflow/alembic/*"]
exclude = ["src/backend/base/langflow/alembic/*", "src/frontend/tests/assets/*"]
line-length = 120
[tool.ruff.lint]

View file

@ -1,5 +1,5 @@
from langflow.api.health_check_router import health_check_router
from langflow.api.log_router import log_router
from langflow.api.router import router, router_v2
from langflow.api.router import router
__all__ = ["health_check_router", "log_router", "router", "router_v2"]
__all__ = ["health_check_router", "log_router", "router"]

View file

@ -19,25 +19,32 @@ from langflow.api.v1 import (
from langflow.api.v2 import files_router as files_router_v2
router = APIRouter(
prefix="/api/v1",
prefix="/api",
)
router_v1 = APIRouter(
prefix="/v1",
)
router_v2 = APIRouter(
prefix="/api/v2",
prefix="/v2",
)
router.include_router(chat_router)
router.include_router(endpoints_router)
router.include_router(validate_router)
router.include_router(store_router)
router.include_router(flows_router)
router.include_router(users_router)
router.include_router(api_key_router)
router.include_router(login_router)
router.include_router(variables_router)
router.include_router(files_router)
router.include_router(monitor_router)
router.include_router(folders_router)
router.include_router(starter_projects_router)
router_v1.include_router(chat_router)
router_v1.include_router(endpoints_router)
router_v1.include_router(validate_router)
router_v1.include_router(store_router)
router_v1.include_router(flows_router)
router_v1.include_router(users_router)
router_v1.include_router(api_key_router)
router_v1.include_router(login_router)
router_v1.include_router(variables_router)
router_v1.include_router(files_router)
router_v1.include_router(monitor_router)
router_v1.include_router(folders_router)
router_v1.include_router(starter_projects_router)
router_v2.include_router(files_router_v2)
router.include_router(router_v1)
router.include_router(router_v2)

View file

@ -117,11 +117,12 @@ class BaseFileComponent(Component, ABC):
_base_inputs = [
FileInput(
name="path",
display_name="Path",
display_name="Files",
fileTypes=[], # Dynamically set in __init__
info="", # Dynamically set in __init__
required=False,
value="",
list=True,
value=[],
),
HandleInput(
name="file_path",

View file

@ -66,6 +66,7 @@ class ChatInput(ChatComponent):
info="Files to be sent with the message.",
advanced=True,
is_list=True,
temp_file=True,
),
MessageTextInput(
name="background_color",

View file

@ -8,16 +8,12 @@
"dataType": "Prompt",
"id": "Prompt-f1f2v",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-lL9HA",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -35,16 +31,12 @@
"dataType": "ChatInput",
"id": "ChatInput-GyBUF",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-lL9HA",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -62,16 +54,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-lL9HA",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-JieGw",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -89,16 +77,12 @@
"dataType": "Prompt",
"id": "Prompt-4IOgm",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-JieGw",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -116,16 +100,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-JieGw",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-dXMRv",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -143,16 +123,12 @@
"dataType": "Prompt",
"id": "Prompt-FRjO8",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-dXMRv",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -170,18 +146,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-dXMRv",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-KXQMh",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -200,9 +170,7 @@
"display_name": "Prompt",
"id": "Prompt-4IOgm",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -212,9 +180,7 @@
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -230,9 +196,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -281,9 +245,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -325,9 +287,7 @@
"data": {
"id": "ChatInput-GyBUF",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -363,9 +323,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -378,9 +336,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -399,9 +355,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -430,7 +384,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -469,6 +423,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -502,10 +457,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -520,9 +472,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -541,9 +491,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -578,9 +526,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -622,9 +568,7 @@
"display_name": "Chat Output",
"id": "ChatOutput-KXQMh",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -658,9 +602,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -673,9 +615,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -695,9 +635,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -753,9 +691,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -775,11 +711,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -800,10 +732,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -819,9 +748,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -841,9 +768,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -879,9 +804,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -925,9 +848,7 @@
"display_name": "Prompt",
"id": "Prompt-FRjO8",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -937,9 +858,7 @@
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -955,9 +874,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1006,9 +923,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -1052,9 +967,7 @@
"display_name": "Prompt",
"id": "Prompt-f1f2v",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -1064,9 +977,7 @@
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -1082,9 +993,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1133,9 +1042,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -1251,10 +1158,7 @@
"data": {
"id": "OpenAIModel-lL9HA",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -1293,9 +1197,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1304,14 +1206,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -1325,9 +1223,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1362,9 +1258,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1546,9 +1440,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1633,10 +1525,7 @@
"data": {
"id": "OpenAIModel-JieGw",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -1675,9 +1564,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1686,14 +1573,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -1707,9 +1590,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1744,9 +1625,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1928,9 +1807,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2015,10 +1892,7 @@
"data": {
"id": "OpenAIModel-dXMRv",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -2057,9 +1931,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -2068,14 +1940,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -2089,9 +1957,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -2126,9 +1992,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2310,9 +2174,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2408,7 +2270,5 @@
"is_component": false,
"last_tested_version": "1.0.19.post2",
"name": "Prompt Chaining",
"tags": [
"chatbots"
]
}
"tags": ["chatbots"]
}

View file

@ -8,16 +8,12 @@
"dataType": "ChatInput",
"id": "ChatInput-jFwUm",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-OcXkl",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -34,16 +30,12 @@
"dataType": "Prompt",
"id": "Prompt-3SM2g",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-OcXkl",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -60,18 +52,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-OcXkl",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-gDYiJ",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -89,9 +75,7 @@
"display_name": "Chat Input",
"id": "ChatInput-jFwUm",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -122,9 +106,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -137,9 +119,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -158,9 +138,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -189,7 +167,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"advanced": true,
@ -227,6 +205,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -257,10 +236,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -274,9 +250,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -294,9 +268,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -331,9 +303,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -375,9 +345,7 @@
"display_name": "Prompt",
"id": "Prompt-3SM2g",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -387,9 +355,7 @@
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -404,9 +370,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -455,9 +419,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -572,9 +534,7 @@
"data": {
"id": "ChatOutput-gDYiJ",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -607,9 +567,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -622,9 +580,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -644,9 +600,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -702,9 +656,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -724,11 +676,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -749,10 +697,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -768,9 +713,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -790,9 +733,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -828,9 +769,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -872,10 +811,7 @@
"data": {
"id": "OpenAIModel-OcXkl",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -914,9 +850,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -925,14 +859,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -945,9 +875,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -982,9 +910,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1166,9 +1092,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1265,7 +1189,5 @@
"is_component": false,
"last_tested_version": "1.0.19.post2",
"name": "Basic Prompting",
"tags": [
"chatbots"
]
}
"tags": ["chatbots"]
}

File diff suppressed because one or more lines are too long

View file

@ -9,16 +9,12 @@
"dataType": "ChatInput",
"id": "ChatInput-Tza35",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "NovitaModel-NPPoo",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -37,16 +33,12 @@
"dataType": "Prompt",
"id": "Prompt-lo2oI",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "NovitaModel-NPPoo",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -65,18 +57,12 @@
"dataType": "NovitaModel",
"id": "NovitaModel-NPPoo",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-G6tol",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -93,9 +79,7 @@
"data": {
"id": "ChatInput-Tza35",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -130,9 +114,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -145,9 +127,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -168,9 +148,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -201,7 +179,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -241,6 +219,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -277,10 +256,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -297,9 +273,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -320,9 +294,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -361,9 +333,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -401,9 +371,7 @@
"data": {
"id": "Prompt-lo2oI",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -414,10 +382,7 @@
"documentation": "",
"edited": false,
"error": null,
"field_order": [
"template",
"tool_placeholder"
],
"field_order": ["template", "tool_placeholder"],
"frozen": false,
"full_path": null,
"icon": "prompts",
@ -439,9 +404,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -490,9 +453,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -530,10 +491,7 @@
"data": {
"id": "NovitaModel-NPPoo",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -571,9 +529,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -585,9 +541,7 @@
"required_inputs": [],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -600,9 +554,7 @@
"display_name": "Novita API Key",
"dynamic": false,
"info": "The Novita API Key to use for Novita AI models.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -638,9 +590,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -775,9 +725,7 @@
"display_name": "Output Parser",
"dynamic": false,
"info": "The parser to use to parse the output of the model",
"input_types": [
"OutputParser"
],
"input_types": ["OutputParser"],
"list": false,
"list_add_label": "Add More",
"name": "output_parser",
@ -831,9 +779,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -900,9 +846,7 @@
"data": {
"id": "ChatOutput-G6tol",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -937,9 +881,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -952,9 +894,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -975,9 +915,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1034,9 +972,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1057,11 +993,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1085,10 +1017,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1105,9 +1034,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1128,9 +1055,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1169,9 +1094,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1245,8 +1168,5 @@
"is_component": false,
"last_tested_version": "1.2.0",
"name": "Diet Analysis",
"tags": [
"chatbots",
"content-generation"
]
}
"tags": ["chatbots", "content-generation"]
}

View file

@ -8,16 +8,12 @@
"dataType": "File",
"id": "File-GwJQZ",
"name": "data",
"output_types": [
"Data"
]
"output_types": ["Data"]
},
"targetHandle": {
"fieldName": "data",
"id": "ParseData-BbvKb",
"inputTypes": [
"Data"
],
"inputTypes": ["Data"],
"type": "other"
}
},
@ -35,17 +31,12 @@
"dataType": "ParseData",
"id": "ParseData-BbvKb",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "Document",
"id": "Prompt-yvZHT",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -63,16 +54,12 @@
"dataType": "ChatInput",
"id": "ChatInput-li477",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-atkmo",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -90,16 +77,12 @@
"dataType": "Prompt",
"id": "Prompt-yvZHT",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-atkmo",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -117,18 +100,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-atkmo",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-8pgwS",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -147,9 +124,7 @@
"display_name": "Chat Input",
"id": "ChatInput-li477",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -180,9 +155,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -195,9 +168,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -216,9 +187,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -247,7 +216,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"advanced": true,
@ -285,6 +254,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -315,10 +285,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -332,9 +299,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -352,9 +317,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -389,9 +352,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -433,9 +394,7 @@
"display_name": "Chat Output",
"id": "ChatOutput-8pgwS",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -469,9 +428,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -484,9 +441,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -506,9 +461,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -564,9 +517,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -586,11 +537,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -611,10 +558,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -630,9 +574,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -652,9 +594,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -690,9 +630,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -736,9 +674,7 @@
"display_name": "Parse Data",
"id": "ParseData-BbvKb",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -746,11 +682,7 @@
"display_name": "Parse Data",
"documentation": "",
"edited": false,
"field_order": [
"data",
"template",
"sep"
],
"field_order": ["data", "template", "sep"],
"frozen": false,
"icon": "message-square",
"legacy": false,
@ -768,9 +700,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -781,9 +711,7 @@
"name": "data_list",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -813,9 +741,7 @@
"display_name": "Data",
"dynamic": false,
"info": "The data to convert to text.",
"input_types": [
"Data"
],
"input_types": ["Data"],
"list": true,
"name": "data",
"placeholder": "",
@ -848,9 +774,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -958,9 +882,7 @@
"data": {
"id": "File-GwJQZ",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -989,9 +911,7 @@
"required_inputs": [],
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
},
{
@ -1003,9 +923,7 @@
"required_inputs": [],
"selected": "DataFrame",
"tool_mode": true,
"types": [
"DataFrame"
],
"types": ["DataFrame"],
"value": "__UNDEFINED__"
},
{
@ -1017,9 +935,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1082,10 +998,7 @@
"display_name": "Server File Path",
"dynamic": false,
"info": "Data object with a 'file_path' property pointing to server file or a Message object with a path to the file. Supercedes 'Path' but supports same file types.",
"input_types": [
"Data",
"Message"
],
"input_types": ["Data", "Message"],
"list": true,
"name": "file_path",
"placeholder": "",
@ -1131,7 +1044,7 @@
"path": {
"_input_type": "FileInput",
"advanced": false,
"display_name": "Path",
"display_name": "Files",
"dynamic": false,
"fileTypes": [
"txt",
@ -1160,7 +1073,7 @@
],
"file_path": "",
"info": "Supported file extensions: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx; optionally bundled in file extensions: zip, tar, tgz, bz2, gz",
"list": false,
"list": true,
"name": "path",
"placeholder": "",
"required": false,
@ -1251,24 +1164,18 @@
"display_name": "Prompt",
"id": "Prompt-yvZHT",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
"template": [
"Document"
]
"template": ["Document"]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
"documentation": "",
"edited": false,
"error": null,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"full_path": null,
"icon": "prompts",
@ -1289,9 +1196,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1305,10 +1210,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1361,9 +1263,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -1405,10 +1305,7 @@
"data": {
"id": "OpenAIModel-atkmo",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -1447,9 +1344,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1458,14 +1353,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -1479,9 +1370,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1516,9 +1405,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1700,9 +1587,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1798,9 +1683,5 @@
"is_component": false,
"last_tested_version": "1.0.19.post2",
"name": "Document Q&A",
"tags": [
"rag",
"q-a",
"openai"
]
}
"tags": ["rag", "q-a", "openai"]
}

View file

@ -9,16 +9,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-hx0nZ",
"name": "model_output",
"output_types": [
"LanguageModel"
]
"output_types": ["LanguageModel"]
},
"targetHandle": {
"fieldName": "llm",
"id": "StructuredOutputv2-Io4Zq",
"inputTypes": [
"LanguageModel"
],
"inputTypes": ["LanguageModel"],
"type": "other"
}
},
@ -37,16 +33,12 @@
"dataType": "ChatInput",
"id": "ChatInput-Gb2ag",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "StructuredOutputv2-Io4Zq",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -65,18 +57,12 @@
"dataType": "ParseDataFrame",
"id": "ParseDataFrame-PwX09",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-xjU9g",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -94,16 +80,12 @@
"dataType": "StructuredOutput",
"id": "StructuredOutputv2-Io4Zq",
"name": "structured_output_dataframe",
"output_types": [
"DataFrame"
]
"output_types": ["DataFrame"]
},
"targetHandle": {
"fieldName": "df",
"id": "ParseDataFrame-PwX09",
"inputTypes": [
"DataFrame"
],
"inputTypes": ["DataFrame"],
"type": "other"
}
},
@ -120,10 +102,7 @@
"data": {
"id": "OpenAIModel-hx0nZ",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -165,9 +144,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -176,14 +153,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -197,9 +170,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -234,9 +205,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -418,9 +387,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -505,9 +472,7 @@
"data": {
"id": "ChatOutput-xjU9g",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "outputs",
"conditional_paths": [],
@ -544,9 +509,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -560,9 +523,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -583,9 +544,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -642,9 +601,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -665,11 +622,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -693,10 +646,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -713,9 +663,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -736,9 +684,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -777,9 +723,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -816,9 +760,7 @@
"data": {
"id": "ChatInput-Gb2ag",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -855,9 +797,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -871,9 +811,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -894,9 +832,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -927,7 +863,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -967,6 +903,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -1003,10 +940,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1023,9 +957,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1046,9 +978,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1087,9 +1017,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1180,10 +1108,7 @@
"data": {
"id": "StructuredOutputv2-Io4Zq",
"node": {
"base_classes": [
"Data",
"DataFrame"
],
"base_classes": ["Data", "DataFrame"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1213,9 +1138,7 @@
"name": "structured_output",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
},
{
@ -1226,9 +1149,7 @@
"name": "structured_output_dataframe",
"selected": "DataFrame",
"tool_mode": true,
"types": [
"DataFrame"
],
"types": ["DataFrame"],
"value": "__UNDEFINED__"
}
],
@ -1259,9 +1180,7 @@
"display_name": "Input Message",
"dynamic": false,
"info": "The input message to the language model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1282,9 +1201,7 @@
"display_name": "Language Model",
"dynamic": false,
"info": "The language model to use to generate the structured output.",
"input_types": [
"LanguageModel"
],
"input_types": ["LanguageModel"],
"list": false,
"list_add_label": "Add More",
"name": "llm",
@ -1417,9 +1334,7 @@
"display_name": "Schema Name",
"dynamic": false,
"info": "Provide a name for the output data schema.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1440,9 +1355,7 @@
"display_name": "Format Instructions",
"dynamic": false,
"info": "The instructions to the language model for formatting the output.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1481,9 +1394,7 @@
"data": {
"id": "ParseDataFrame-PwX09",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "processing",
"conditional_paths": [],
@ -1492,11 +1403,7 @@
"display_name": "Parse DataFrame",
"documentation": "",
"edited": false,
"field_order": [
"df",
"template",
"sep"
],
"field_order": ["df", "template", "sep"],
"frozen": false,
"icon": "braces",
"key": "ParseDataFrame",
@ -1514,9 +1421,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1548,9 +1453,7 @@
"display_name": "DataFrame",
"dynamic": false,
"info": "The DataFrame to convert to text rows.",
"input_types": [
"DataFrame"
],
"input_types": ["DataFrame"],
"list": false,
"list_add_label": "Add More",
"name": "df",
@ -1589,9 +1492,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template for formatting each row. Use placeholders matching column names in the DataFrame, for example '{col1}', '{col2}'.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1640,8 +1541,5 @@
"is_component": false,
"last_tested_version": "1.1.5",
"name": "Financial Report Parser",
"tags": [
"chatbots",
"content-generation"
]
}
"tags": ["chatbots", "content-generation"]
}

View file

@ -9,16 +9,12 @@
"dataType": "ChatInput",
"id": "ChatInput-fifot",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-5rqMu",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -37,18 +33,12 @@
"dataType": "Agent",
"id": "Agent-5rqMu",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-mXpv2",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -67,16 +57,12 @@
"dataType": "ComposioAPI",
"id": "ComposioAPI-Z0Iiy",
"name": "tools",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-5rqMu",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -92,9 +78,7 @@
"data": {
"id": "Agent-5rqMu",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "agents",
"conditional_paths": [],
@ -148,9 +132,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -182,9 +164,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -238,9 +218,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -293,9 +271,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -394,9 +370,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"list_add_label": "Add More",
"name": "memory",
@ -501,10 +475,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -542,11 +513,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -563,9 +530,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -586,9 +551,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -609,9 +572,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -661,9 +622,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -703,9 +662,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"list_add_label": "Add More",
"name": "tools",
@ -758,9 +715,7 @@
"data": {
"id": "ChatInput-fifot",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -797,9 +752,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -813,9 +766,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -836,9 +787,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -869,7 +818,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -909,6 +858,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -945,10 +895,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -965,9 +912,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -988,9 +933,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1029,9 +972,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1069,9 +1010,7 @@
"data": {
"id": "ChatOutput-mXpv2",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "outputs",
"conditional_paths": [],
@ -1108,9 +1047,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1124,9 +1061,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1147,9 +1082,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1206,9 +1139,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1229,11 +1160,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1257,10 +1184,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1277,9 +1201,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1300,9 +1222,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1341,9 +1261,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1410,9 +1328,7 @@
"display_name": "Composio Tools",
"id": "ComposioAPI-Z0Iiy",
"node": {
"base_classes": [
"Tool"
],
"base_classes": ["Tool"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1448,9 +1364,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -1497,9 +1411,7 @@
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": [
"GMAIL_GET_PEOPLE"
]
"value": ["GMAIL_GET_PEOPLE"]
},
"api_key": {
"_input_type": "SecretStrInput",
@ -1507,9 +1419,7 @@
"display_name": "Composio API Key",
"dynamic": false,
"info": "Refer to https://docs.composio.dev/faq/api_key/api_key",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1527,9 +1437,7 @@
"display_name": "App Credentials",
"dynamic": true,
"info": "Credentials for app authentication (API Key, Password, etc)",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "app_credentials",
"password": true,
@ -1747,9 +1655,7 @@
"display_name": "Entity ID",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1770,9 +1676,7 @@
"display_name": "Username",
"dynamic": true,
"info": "Username for Basic authentication",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1818,7 +1722,5 @@
"id": "0473161e-ca7e-413c-9113-e98a142313ed",
"is_component": false,
"name": "Gmail Agent",
"tags": [
"agents"
]
}
"tags": ["agents"]
}

View file

@ -9,16 +9,12 @@
"dataType": "StructuredOutputComponent",
"id": "StructuredOutputComponent-XYoUc",
"name": "structured_output",
"output_types": [
"Data"
]
"output_types": ["Data"]
},
"targetHandle": {
"fieldName": "data",
"id": "ParseData-HzweJ",
"inputTypes": [
"Data"
],
"inputTypes": ["Data"],
"type": "other"
}
},
@ -37,18 +33,12 @@
"dataType": "ParseData",
"id": "ParseData-HzweJ",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-xQxLm",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -66,16 +56,12 @@
"dataType": "ChatInput",
"id": "ChatInput-rAWlE",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-cqeNw",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -93,16 +79,12 @@
"dataType": "Prompt",
"id": "Prompt-AzK6t",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-cqeNw",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -120,16 +102,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-cqeNw",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "StructuredOutputComponent-XYoUc",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -147,16 +125,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-cqeNw",
"name": "model_output",
"output_types": [
"LanguageModel"
]
"output_types": ["LanguageModel"]
},
"targetHandle": {
"fieldName": "llm",
"id": "StructuredOutputComponent-XYoUc",
"inputTypes": [
"LanguageModel"
],
"inputTypes": ["LanguageModel"],
"type": "other"
}
},
@ -175,9 +149,7 @@
"display_name": "Chat Input",
"id": "ChatInput-rAWlE",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -211,9 +183,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -226,9 +196,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -248,9 +216,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -280,7 +246,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -319,6 +285,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -353,10 +320,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -372,9 +336,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -394,9 +356,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -432,9 +392,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -478,9 +436,7 @@
"display_name": "Chat Output",
"id": "ChatOutput-xQxLm",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -514,9 +470,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -529,9 +483,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -551,9 +503,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -609,9 +559,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -631,11 +579,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -656,10 +600,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -675,9 +616,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -697,9 +636,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -735,9 +672,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -816,9 +751,7 @@
"display_name": "Structured Output",
"id": "StructuredOutputComponent-XYoUc",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -846,9 +779,7 @@
"method": "build_structured_output",
"name": "structured_output",
"selected": "Data",
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -879,9 +810,7 @@
"display_name": "Input message",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -901,9 +830,7 @@
"display_name": "Language Model",
"dynamic": false,
"info": "The language model to use to generate the structured output.",
"input_types": [
"LanguageModel"
],
"input_types": ["LanguageModel"],
"list": false,
"name": "llm",
"placeholder": "",
@ -1047,9 +974,7 @@
"data": {
"id": "ParseData-HzweJ",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1057,11 +982,7 @@
"display_name": "Parse Data",
"documentation": "",
"edited": false,
"field_order": [
"data",
"template",
"sep"
],
"field_order": ["data", "template", "sep"],
"frozen": false,
"icon": "message-square",
"legacy": false,
@ -1079,9 +1000,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1092,9 +1011,7 @@
"name": "data_list",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -1125,9 +1042,7 @@
"display_name": "Data",
"dynamic": false,
"info": "The data to convert to text.",
"input_types": [
"Data"
],
"input_types": ["Data"],
"list": true,
"name": "data",
"placeholder": "",
@ -1163,9 +1078,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1210,9 +1123,7 @@
"display_name": "Prompt",
"id": "Prompt-AzK6t",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -1222,9 +1133,7 @@
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -1240,9 +1149,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1291,9 +1198,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -1335,10 +1240,7 @@
"data": {
"id": "OpenAIModel-cqeNw",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -1377,9 +1279,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1388,14 +1288,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -1409,9 +1305,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1446,9 +1340,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1630,9 +1522,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1728,7 +1618,5 @@
"is_component": false,
"last_tested_version": "1.0.19.post2",
"name": "Image Sentiment Analysis",
"tags": [
"classification"
]
}
"tags": ["classification"]
}

View file

@ -9,16 +9,12 @@
"dataType": "Prompt",
"id": "Prompt-WRaed",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_prompt",
"id": "Agent-IDDA1",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -37,16 +33,12 @@
"dataType": "needle",
"id": "needle-nk3Lq",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-IDDA1",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -65,16 +57,12 @@
"dataType": "ChatInput",
"id": "ChatInput-V0cjZ",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-IDDA1",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -91,18 +79,12 @@
"dataType": "Agent",
"id": "Agent-IDDA1",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-Ftr5v",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -147,9 +129,7 @@
"data": {
"id": "Prompt-WRaed",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -160,10 +140,7 @@
"documentation": "",
"edited": false,
"error": null,
"field_order": [
"template",
"tool_placeholder"
],
"field_order": ["template", "tool_placeholder"],
"frozen": false,
"full_path": null,
"icon": "prompts",
@ -184,9 +161,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -235,9 +210,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -275,9 +248,7 @@
"data": {
"id": "ChatOutput-Ftr5v",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -312,9 +283,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -327,9 +296,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -350,9 +317,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -409,9 +374,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -432,11 +395,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -460,10 +419,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -480,9 +436,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -503,9 +457,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -544,9 +496,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -639,9 +589,7 @@
"data": {
"id": "needle-nk3Lq",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "needle",
"conditional_paths": [],
@ -674,9 +622,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -708,9 +654,7 @@
"display_name": "Collection ID",
"dynamic": false,
"info": "The ID of the Needle collection.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -731,9 +675,7 @@
"display_name": "Needle API Key",
"dynamic": false,
"info": "Your Needle API key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "needle_api_key",
"password": true,
@ -750,9 +692,7 @@
"display_name": "User Query",
"dynamic": false,
"info": "Enter your question here. In tool mode, you can also specify top_k parameter (min: 20).",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -792,10 +732,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -865,9 +802,7 @@
{
"description": "run(collection_id: Message, needle_api_key: Message, query: Message, top_k: FieldTypes.INTEGER) - A retriever that uses the Needle API to search collections.",
"name": "needle-run",
"tags": [
"needle-run"
]
"tags": ["needle-run"]
}
]
},
@ -911,9 +846,7 @@
"data": {
"id": "ChatInput-V0cjZ",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -949,9 +882,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -965,9 +896,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -988,9 +917,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1021,7 +948,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -1061,6 +988,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -1097,10 +1025,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1117,9 +1042,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1140,9 +1063,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1181,9 +1102,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1221,9 +1140,7 @@
"data": {
"id": "Agent-IDDA1",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "agents",
"conditional_paths": [],
@ -1276,9 +1193,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1311,9 +1226,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1395,9 +1308,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -1450,9 +1361,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1551,9 +1460,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"list_add_label": "Add More",
"name": "memory",
@ -1658,10 +1565,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1699,11 +1603,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1720,9 +1620,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1743,9 +1641,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1767,9 +1663,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1820,9 +1714,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1862,9 +1754,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"list_add_label": "Add More",
"name": "tools",
@ -1925,11 +1815,5 @@
"is_component": false,
"last_tested_version": "1.2.0",
"name": "Invoice Summarizer",
"tags": [
"chatbots",
"content-generation",
"agent",
"assistants",
"agents"
]
}
"tags": ["chatbots", "content-generation", "agent", "assistants", "agents"]
}

View file

@ -9,16 +9,12 @@
"dataType": "LoopComponent",
"id": "LoopComponent-QhsXW",
"name": "item",
"output_types": [
"Data"
]
"output_types": ["Data"]
},
"targetHandle": {
"fieldName": "data",
"id": "ParseData-x6wkY",
"inputTypes": [
"Data"
],
"inputTypes": ["Data"],
"type": "other"
}
},
@ -37,16 +33,12 @@
"dataType": "ParseData",
"id": "ParseData-x6wkY",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "AnthropicModel-uuAY6",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -65,16 +57,12 @@
"dataType": "AnthropicModel",
"id": "AnthropicModel-uuAY6",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "message",
"id": "MessagetoData-9Kbcj",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -93,17 +81,13 @@
"dataType": "MessagetoData",
"id": "MessagetoData-9Kbcj",
"name": "data",
"output_types": [
"Data"
]
"output_types": ["Data"]
},
"targetHandle": {
"dataType": "LoopComponent",
"id": "LoopComponent-QhsXW",
"name": "item",
"output_types": [
"Data"
]
"output_types": ["Data"]
}
},
"id": "reactflow__edge-MessagetoData-9Kbcj{œdataTypeœ:œMessagetoDataœ,œidœ:œMessagetoData-9Kbcjœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-LoopComponent-QhsXW{œdataTypeœ:œLoopComponentœ,œidœ:œLoopComponent-QhsXWœ,œnameœ:œitemœ,œoutput_typesœ:[œDataœ]}",
@ -121,16 +105,12 @@
"dataType": "LoopComponent",
"id": "LoopComponent-QhsXW",
"name": "done",
"output_types": [
"Data"
]
"output_types": ["Data"]
},
"targetHandle": {
"fieldName": "data",
"id": "ParseData-NpCtQ",
"inputTypes": [
"Data"
],
"inputTypes": ["Data"],
"type": "other"
}
},
@ -149,18 +129,12 @@
"dataType": "ParseData",
"id": "ParseData-NpCtQ",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-Uq5M0",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -179,16 +153,12 @@
"dataType": "ChatInput",
"id": "ChatInput-doR66",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "search_query",
"id": "ArXivComponent-Vwek5",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -205,16 +175,12 @@
"dataType": "ArXivComponent",
"id": "ArXivComponent-Vwek5",
"name": "data",
"output_types": [
"Data"
]
"output_types": ["Data"]
},
"targetHandle": {
"fieldName": "data",
"id": "LoopComponent-QhsXW",
"inputTypes": [
"Data"
],
"inputTypes": ["Data"],
"type": "other"
}
},
@ -230,9 +196,7 @@
"data": {
"id": "ArXivComponent-Vwek5",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -240,11 +204,7 @@
"display_name": "arXiv",
"documentation": "",
"edited": false,
"field_order": [
"search_query",
"search_type",
"max_results"
],
"field_order": ["search_query", "search_type", "max_results"],
"frozen": false,
"icon": "arXiv",
"legacy": false,
@ -261,9 +221,7 @@
"name": "data",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
},
{
@ -274,9 +232,7 @@
"name": "dataframe",
"selected": "DataFrame",
"tool_mode": true,
"types": [
"DataFrame"
],
"types": ["DataFrame"],
"value": "__UNDEFINED__"
}
],
@ -325,9 +281,7 @@
"display_name": "Search Query",
"dynamic": false,
"info": "The search query for arXiv papers (e.g., 'quantum computing')",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -351,13 +305,7 @@
"dynamic": false,
"info": "The field to search in",
"name": "search_type",
"options": [
"all",
"title",
"abstract",
"author",
"cat"
],
"options": ["all", "title", "abstract", "author", "cat"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -391,9 +339,7 @@
"data": {
"id": "LoopComponent-QhsXW",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"category": "logic",
"conditional_paths": [],
@ -402,9 +348,7 @@
"display_name": "Loop",
"documentation": "",
"edited": false,
"field_order": [
"data"
],
"field_order": ["data"],
"frozen": false,
"icon": "infinity",
"key": "LoopComponent",
@ -422,9 +366,7 @@
"name": "item",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
},
{
@ -435,9 +377,7 @@
"name": "done",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -469,9 +409,7 @@
"display_name": "Data",
"dynamic": false,
"info": "The initial list of Data objects to iterate over.",
"input_types": [
"Data"
],
"input_types": ["Data"],
"list": false,
"list_add_label": "Add More",
"name": "data",
@ -508,10 +446,7 @@
"data": {
"id": "ParseData-x6wkY",
"node": {
"base_classes": [
"Data",
"Message"
],
"base_classes": ["Data", "Message"],
"beta": false,
"category": "processing",
"conditional_paths": [],
@ -520,11 +455,7 @@
"display_name": "Data to Message",
"documentation": "",
"edited": false,
"field_order": [
"data",
"template",
"sep"
],
"field_order": ["data", "template", "sep"],
"frozen": false,
"icon": "message-square",
"key": "ParseData",
@ -544,9 +475,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -557,9 +486,7 @@
"name": "data_list",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -591,9 +518,7 @@
"display_name": "Data",
"dynamic": false,
"info": "The data to convert to text.",
"input_types": [
"Data"
],
"input_types": ["Data"],
"list": true,
"list_add_label": "Add More",
"name": "data",
@ -632,9 +557,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -673,10 +596,7 @@
"data": {
"id": "AnthropicModel-uuAY6",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -715,9 +635,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -726,14 +644,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -747,9 +661,7 @@
"display_name": "Anthropic API Key",
"dynamic": false,
"info": "Your Anthropic API key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -767,9 +679,7 @@
"display_name": "Anthropic API URL",
"dynamic": false,
"info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -809,9 +719,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -880,9 +788,7 @@
"display_name": "Prefill",
"dynamic": false,
"info": "Prefill text to guide the model's response.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -921,9 +827,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1009,9 +913,7 @@
"data": {
"id": "MessagetoData-9Kbcj",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": true,
"category": "processing",
"conditional_paths": [],
@ -1020,9 +922,7 @@
"display_name": "Message to Data",
"documentation": "",
"edited": false,
"field_order": [
"message"
],
"field_order": ["message"],
"frozen": false,
"icon": "message-square-share",
"key": "MessagetoData",
@ -1040,9 +940,7 @@
"name": "data",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -1074,9 +972,7 @@
"display_name": "Message",
"dynamic": false,
"info": "The Message object to convert to a Data object",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1114,10 +1010,7 @@
"data": {
"id": "ParseData-NpCtQ",
"node": {
"base_classes": [
"Data",
"Message"
],
"base_classes": ["Data", "Message"],
"beta": false,
"category": "processing",
"conditional_paths": [],
@ -1126,11 +1019,7 @@
"display_name": "Data to Message",
"documentation": "",
"edited": false,
"field_order": [
"data",
"template",
"sep"
],
"field_order": ["data", "template", "sep"],
"frozen": false,
"icon": "message-square",
"key": "ParseData",
@ -1150,9 +1039,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1163,9 +1050,7 @@
"name": "data_list",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -1197,9 +1082,7 @@
"display_name": "Data",
"dynamic": false,
"info": "The data to convert to text.",
"input_types": [
"Data"
],
"input_types": ["Data"],
"list": true,
"list_add_label": "Add More",
"name": "data",
@ -1238,9 +1121,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1279,9 +1160,7 @@
"data": {
"id": "ChatOutput-Uq5M0",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1316,9 +1195,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1331,9 +1208,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1354,9 +1229,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1413,9 +1286,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1436,11 +1307,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1464,10 +1331,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1484,9 +1348,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1507,9 +1369,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1548,9 +1408,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1588,9 +1446,7 @@
"data": {
"id": "ChatInput-doR66",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1625,9 +1481,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1640,9 +1494,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1663,9 +1515,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1696,7 +1546,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -1736,6 +1586,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -1772,10 +1623,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1792,9 +1640,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1815,9 +1661,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1856,9 +1700,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1963,8 +1805,5 @@
"is_component": false,
"last_tested_version": "1.1.5",
"name": "Research Translation Loop",
"tags": [
"chatbots",
"content-generation"
]
}
"tags": ["chatbots", "content-generation"]
}

View file

@ -9,16 +9,12 @@
"dataType": "StructuredOutputComponent",
"id": "StructuredOutputComponent-Kqbq4",
"name": "structured_output",
"output_types": [
"Data"
]
"output_types": ["Data"]
},
"targetHandle": {
"fieldName": "data",
"id": "ParseData-7XOFR",
"inputTypes": [
"Data"
],
"inputTypes": ["Data"],
"type": "other"
}
},
@ -37,16 +33,12 @@
"dataType": "ChatInput",
"id": "ChatInput-tMLRq",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-dcKuR",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -65,16 +57,12 @@
"dataType": "Agent",
"id": "Agent-dcKuR",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "StructuredOutputComponent-Kqbq4",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -93,16 +81,12 @@
"dataType": "TavilySearchComponent",
"id": "TavilySearchComponent-cGK9T",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-dcKuR",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -120,16 +104,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-prL67",
"name": "model_output",
"output_types": [
"LanguageModel"
]
"output_types": ["LanguageModel"]
},
"targetHandle": {
"fieldName": "llm",
"id": "StructuredOutputComponent-Kqbq4",
"inputTypes": [
"LanguageModel"
],
"inputTypes": ["LanguageModel"],
"type": "other"
}
},
@ -145,18 +125,12 @@
"dataType": "ParseData",
"id": "ParseData-7XOFR",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-JrLxU",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -174,9 +148,7 @@
"display_name": "Chat Input",
"id": "ChatInput-tMLRq",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -210,9 +182,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -225,9 +195,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -246,9 +214,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -277,7 +243,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -316,6 +282,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -349,10 +316,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -367,9 +331,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -388,9 +350,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -425,9 +385,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -469,9 +427,7 @@
"display_name": "Chat Output",
"id": "ChatOutput-JrLxU",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -505,9 +461,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -520,9 +474,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -542,9 +494,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -600,9 +550,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -622,11 +570,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -647,10 +591,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -666,9 +607,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -688,9 +627,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -726,9 +663,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -883,9 +818,7 @@
"display_name": "Structured Output",
"id": "StructuredOutputComponent-Kqbq4",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -913,9 +846,7 @@
"method": "build_structured_output",
"name": "structured_output",
"selected": "Data",
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -946,9 +877,7 @@
"display_name": "Input message",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -968,9 +897,7 @@
"display_name": "Language Model",
"dynamic": false,
"info": "The language model to use to generate the structured output.",
"input_types": [
"LanguageModel"
],
"input_types": ["LanguageModel"],
"list": false,
"name": "llm",
"placeholder": "",
@ -1162,9 +1089,7 @@
"data": {
"id": "ParseData-7XOFR",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "helpers",
"conditional_paths": [],
@ -1173,11 +1098,7 @@
"display_name": "Parse Data",
"documentation": "",
"edited": false,
"field_order": [
"data",
"template",
"sep"
],
"field_order": ["data", "template", "sep"],
"frozen": false,
"icon": "message-square",
"key": "ParseData",
@ -1196,9 +1117,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1209,9 +1128,7 @@
"name": "data_list",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
}
],
@ -1242,9 +1159,7 @@
"display_name": "Data",
"dynamic": false,
"info": "The data to convert to text.",
"input_types": [
"Data"
],
"input_types": ["Data"],
"list": true,
"name": "data",
"placeholder": "",
@ -1279,9 +1194,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1324,9 +1237,7 @@
"display_name": "Agent",
"id": "Agent-dcKuR",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1377,9 +1288,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1408,9 +1317,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1461,9 +1368,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1514,9 +1419,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -1608,9 +1511,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"name": "memory",
"placeholder": "",
@ -1704,10 +1605,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"placeholder": "",
"required": false,
"show": true,
@ -1741,11 +1639,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"placeholder": "",
"required": false,
"show": true,
@ -1761,9 +1655,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -1783,9 +1675,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -1805,9 +1695,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1844,9 +1732,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1885,9 +1771,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"name": "tools",
"placeholder": "",
@ -1972,10 +1856,7 @@
"display_name": "Tavily AI Search",
"id": "TavilySearchComponent-cGK9T",
"node": {
"base_classes": [
"Data",
"Message"
],
"base_classes": ["Data", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -2010,9 +1891,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -2025,9 +1904,7 @@
"display_name": "Tavily API Key",
"dynamic": false,
"info": "Your Tavily API Key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -2116,9 +1993,7 @@
"display_name": "Search Query",
"dynamic": false,
"info": "The search query you want to execute with Tavily.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2142,10 +2017,7 @@
"dynamic": false,
"info": "The depth of the search.",
"name": "search_depth",
"options": [
"basic",
"advanced"
],
"options": ["basic", "advanced"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -2165,12 +2037,7 @@
"dynamic": false,
"info": "The time range back from the current date to include in the search results.",
"name": "time_range",
"options": [
"day",
"week",
"month",
"year"
],
"options": ["day", "week", "month", "year"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -2205,10 +2072,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -2262,16 +2126,12 @@
{
"description": "fetch_content(api_key: Message) - **Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results.",
"name": "TavilySearchComponent-fetch_content",
"tags": [
"TavilySearchComponent-fetch_content"
]
"tags": ["TavilySearchComponent-fetch_content"]
},
{
"description": "fetch_content_text(api_key: Message) - **Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results.",
"name": "TavilySearchComponent-fetch_content_text",
"tags": [
"TavilySearchComponent-fetch_content_text"
]
"tags": ["TavilySearchComponent-fetch_content_text"]
}
]
},
@ -2284,10 +2144,7 @@
"dynamic": false,
"info": "The category of the search.",
"name": "topic",
"options": [
"general",
"news"
],
"options": ["general", "news"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -2321,10 +2178,7 @@
"data": {
"id": "OpenAIModel-prL67",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -2363,9 +2217,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -2374,14 +2226,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -2395,9 +2243,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -2432,9 +2278,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2616,9 +2460,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2714,8 +2556,5 @@
"is_component": false,
"last_tested_version": "1.1.1",
"name": "Market Research",
"tags": [
"assistants",
"agents"
]
}
"tags": ["assistants", "agents"]
}

View file

@ -8,17 +8,12 @@
"dataType": "Memory",
"id": "Memory-gWJrq",
"name": "messages_text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "memory",
"id": "Prompt-yhdMP",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -36,16 +31,12 @@
"dataType": "ChatInput",
"id": "ChatInput-PEO9d",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-63o3Q",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -63,16 +54,12 @@
"dataType": "Prompt",
"id": "Prompt-yhdMP",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-63o3Q",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -90,18 +77,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-63o3Q",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-BIXzI",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -118,9 +99,7 @@
"data": {
"id": "ChatInput-PEO9d",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -154,9 +133,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -169,9 +146,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -191,9 +166,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -223,7 +196,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -262,6 +235,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -296,10 +270,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -315,9 +286,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -337,9 +306,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -375,9 +342,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -421,9 +386,7 @@
"display_name": "Chat Output",
"id": "ChatOutput-BIXzI",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -457,9 +420,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -472,9 +433,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -494,9 +453,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -552,9 +509,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -574,11 +529,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -599,10 +550,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -618,9 +566,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -640,9 +586,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -678,9 +622,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -789,10 +731,7 @@
"data": {
"id": "Memory-gWJrq",
"node": {
"base_classes": [
"Data",
"Message"
],
"base_classes": ["Data", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -824,9 +763,7 @@
"name": "messages",
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
},
{
@ -837,9 +774,7 @@
"name": "messages_text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -850,9 +785,7 @@
"name": "dataframe",
"selected": "DataFrame",
"tool_mode": true,
"types": [
"DataFrame"
],
"types": ["DataFrame"],
"value": "__UNDEFINED__"
}
],
@ -883,9 +816,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"name": "memory",
"placeholder": "",
@ -920,10 +851,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"placeholder": "",
"required": false,
"show": true,
@ -941,11 +869,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"placeholder": "",
"required": false,
"show": true,
@ -961,9 +885,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -983,9 +905,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -1005,9 +925,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1050,24 +968,18 @@
"data": {
"id": "Prompt-yhdMP",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
"template": [
"memory"
]
"template": ["memory"]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
"documentation": "",
"edited": false,
"error": null,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"full_path": null,
"icon": "prompts",
@ -1088,9 +1000,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1123,10 +1033,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1161,9 +1068,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -1205,10 +1110,7 @@
"data": {
"id": "OpenAIModel-63o3Q",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -1247,9 +1149,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1258,14 +1158,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -1279,9 +1175,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1316,9 +1210,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1500,9 +1392,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1598,9 +1488,5 @@
"is_component": false,
"last_tested_version": "1.0.19.post2",
"name": "Memory Chatbot",
"tags": [
"chatbots",
"openai",
"assistants"
]
}
"tags": ["chatbots", "openai", "assistants"]
}

View file

@ -9,16 +9,12 @@
"dataType": "AgentQL",
"id": "AgentQL-QrQyl",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-DlbOP",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -37,16 +33,12 @@
"dataType": "ChatInput",
"id": "ChatInput-rZZHB",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-DlbOP",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -64,18 +56,12 @@
"dataType": "Agent",
"id": "Agent-DlbOP",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-tCBqx",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "other"
}
},
@ -147,9 +133,7 @@
"display_name": "AgentQL Query Data",
"id": "AgentQL-QrQyl",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -186,9 +170,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -201,9 +183,7 @@
"display_name": "API Key",
"dynamic": false,
"info": "Your AgentQL API key from dev.agentql.com",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -295,10 +275,7 @@
"dynamic": false,
"info": "'standard' uses deep data analysis, while 'fast' trades some depth of analysis for speed.",
"name": "mode",
"options": [
"fast",
"standard"
],
"options": ["fast", "standard"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -316,9 +293,7 @@
"display_name": "Prompt",
"dynamic": false,
"info": "A Natural Language description of the data to extract from the page. Alternative to AgentQL query.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -341,9 +316,7 @@
"display_name": "AgentQL Query",
"dynamic": false,
"info": "The AgentQL query to execute. Learn more at https://docs.agentql.com/agentql-query or use a prompt.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -402,10 +375,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -489,9 +459,7 @@
"description": "build_output(api_key: Message, url: Message) - Extracts structured data from a web page using an AgentQL query or a Natural Language description.",
"name": "AgentQL-build_output",
"status": true,
"tags": [
"AgentQL-build_output"
]
"tags": ["AgentQL-build_output"]
}
]
},
@ -501,9 +469,7 @@
"display_name": "URL",
"dynamic": false,
"info": "The URL of the public web page you want to extract data from.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -565,9 +531,7 @@
"data": {
"id": "ChatInput-rZZHB",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -604,9 +568,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -620,9 +582,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -643,9 +603,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -676,7 +634,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -716,6 +674,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -752,10 +711,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -772,9 +728,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -795,9 +749,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -836,9 +788,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -876,9 +826,7 @@
"data": {
"id": "ChatOutput-tCBqx",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -913,9 +861,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -928,9 +874,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -951,9 +895,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1010,9 +952,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1033,11 +973,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"name": "input_value",
@ -1058,10 +994,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1078,9 +1011,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1101,9 +1032,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1142,9 +1071,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1182,9 +1109,7 @@
"data": {
"id": "Agent-DlbOP",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1235,9 +1160,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1268,9 +1191,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1352,9 +1273,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1407,9 +1326,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1508,9 +1425,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"list_add_label": "Add More",
"name": "memory",
@ -1615,10 +1530,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1656,11 +1568,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1677,9 +1585,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1700,9 +1606,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1723,9 +1627,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1775,9 +1677,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1817,9 +1717,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"list_add_label": "Add More",
"name": "tools",
@ -1907,8 +1805,5 @@
"is_component": false,
"last_tested_version": "1.2.0",
"name": "News Aggregator",
"tags": [
"web-scraping",
"agents"
]
}
"tags": ["web-scraping", "agents"]
}

View file

@ -9,16 +9,12 @@
"dataType": "APIRequest",
"id": "APIRequest-KJLNf",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-gZkrx",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -37,16 +33,12 @@
"dataType": "ChatInput",
"id": "ChatInput-eumKo",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-gZkrx",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -65,18 +57,12 @@
"dataType": "Agent",
"id": "Agent-gZkrx",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-D4JWF",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -95,10 +81,7 @@
"display_name": "API Request",
"id": "APIRequest-KJLNf",
"node": {
"base_classes": [
"Data",
"DataFrame"
],
"base_classes": ["Data", "DataFrame"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -137,9 +120,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -152,9 +133,7 @@
"display_name": "Body",
"dynamic": false,
"info": "The body to send with the request as a dictionary (for POST, PATCH, PUT).",
"input_types": [
"Data"
],
"input_types": ["Data"],
"is_list": true,
"list_add_label": "Add More",
"name": "body",
@ -223,9 +202,7 @@
"display_name": "cURL",
"dynamic": false,
"info": "Paste a curl command to populate the fields. This will fill in the dictionary fields for headers and body.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -266,9 +243,7 @@
"display_name": "Headers",
"dynamic": false,
"info": "The headers to send with the request as a dictionary.",
"input_types": [
"Data"
],
"input_types": ["Data"],
"is_list": true,
"list_add_label": "Add More",
"name": "headers",
@ -342,13 +317,7 @@
"info": "The HTTP method to use.",
"load_from_db": false,
"name": "method",
"options": [
"GET",
"POST",
"PATCH",
"PUT",
"DELETE"
],
"options": ["GET", "POST", "PATCH", "PUT", "DELETE"],
"options_metadata": [],
"placeholder": "",
"real_time_refresh": true,
@ -366,9 +335,7 @@
"display_name": "Query Parameters",
"dynamic": false,
"info": "The query parameters to append to the URL.",
"input_types": [
"Data"
],
"input_types": ["Data"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -444,10 +411,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -518,17 +482,13 @@
"description": "make_requests() - Make HTTP requests using URLs or cURL commands.",
"name": "APIRequest-make_requests",
"status": true,
"tags": [
"APIRequest-make_requests"
]
"tags": ["APIRequest-make_requests"]
},
{
"description": "as_dataframe() - Make HTTP requests using URLs or cURL commands.",
"name": "APIRequest-as_dataframe",
"status": true,
"tags": [
"APIRequest-as_dataframe"
]
"tags": ["APIRequest-as_dataframe"]
}
]
},
@ -538,9 +498,7 @@
"display_name": "URLs",
"dynamic": false,
"info": "Enter one or more URLs, separated by commas.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": true,
"list_add_label": "Add More",
"load_from_db": false,
@ -553,9 +511,7 @@
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": [
""
]
"value": [""]
},
"use_curl": {
"_input_type": "BoolInput",
@ -599,9 +555,7 @@
"data": {
"id": "ChatInput-eumKo",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -638,9 +592,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -654,9 +606,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -677,9 +627,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -710,7 +658,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -750,6 +698,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -786,10 +735,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -806,9 +752,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -829,9 +773,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -870,9 +812,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -910,9 +850,7 @@
"data": {
"id": "ChatOutput-D4JWF",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "outputs",
"conditional_paths": [],
@ -949,9 +887,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -965,9 +901,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -988,9 +922,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1047,9 +979,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1070,11 +1000,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1098,10 +1024,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1118,9 +1041,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1141,9 +1062,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1182,9 +1101,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1306,9 +1223,7 @@
"data": {
"id": "Agent-gZkrx",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "agents",
"conditional_paths": [],
@ -1361,9 +1276,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1396,9 +1309,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1480,9 +1391,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1535,9 +1444,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1636,9 +1543,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"list_add_label": "Add More",
"name": "memory",
@ -1743,10 +1648,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1784,11 +1686,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1805,9 +1703,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1828,9 +1724,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1852,9 +1746,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1905,9 +1797,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1947,9 +1837,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"list_add_label": "Add More",
"name": "tools",
@ -2010,7 +1898,5 @@
"is_component": false,
"last_tested_version": "1.2.0",
"name": "Pokédex Agent",
"tags": [
"agents"
]
}
"tags": ["agents"]
}

View file

@ -2297,7 +2297,7 @@
"path": {
"_input_type": "FileInput",
"advanced": false,
"display_name": "Path",
"display_name": "Files",
"dynamic": false,
"fileTypes": [
"txt",
@ -2326,7 +2326,7 @@
],
"file_path": "",
"info": "Supported file extensions: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx; optionally bundled in file extensions: zip, tar, tgz, bz2, gz",
"list": false,
"list": true,
"list_add_label": "Add More",
"name": "path",
"placeholder": "",

View file

@ -9,16 +9,12 @@
"dataType": "AgentQL",
"id": "AgentQL-FEfZe",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-C2lNW",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -37,16 +33,12 @@
"dataType": "TavilySearchComponent",
"id": "TavilySearchComponent-ilBh2",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-C2lNW",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -65,16 +57,12 @@
"dataType": "ChatInput",
"id": "ChatInput-ikOpG",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-C2lNW",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -92,18 +80,12 @@
"dataType": "Agent",
"id": "Agent-C2lNW",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-uu7cZ",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "other"
}
},
@ -120,9 +102,7 @@
"data": {
"id": "ChatInput-ikOpG",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -159,9 +139,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -175,9 +153,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -198,9 +174,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -231,7 +205,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -271,6 +245,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -307,10 +282,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -327,9 +299,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -350,9 +320,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -391,9 +359,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -431,9 +397,7 @@
"data": {
"id": "ChatOutput-uu7cZ",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -468,9 +432,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -483,9 +445,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -506,9 +466,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -565,9 +523,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -588,11 +544,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"name": "input_value",
@ -613,10 +565,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -633,9 +582,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -656,9 +603,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -697,9 +642,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -737,10 +680,7 @@
"data": {
"id": "TavilySearchComponent-ilBh2",
"node": {
"base_classes": [
"Data",
"Message"
],
"base_classes": ["Data", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -776,9 +716,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -791,9 +729,7 @@
"display_name": "Tavily API Key",
"dynamic": false,
"info": "Your Tavily API Key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -882,9 +818,7 @@
"display_name": "Search Query",
"dynamic": false,
"info": "The search query you want to execute with Tavily.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -908,10 +842,7 @@
"dynamic": false,
"info": "The depth of the search.",
"name": "search_depth",
"options": [
"basic",
"advanced"
],
"options": ["basic", "advanced"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -931,12 +862,7 @@
"dynamic": false,
"info": "The time range back from the current date to include in the search results.",
"name": "time_range",
"options": [
"day",
"week",
"month",
"year"
],
"options": ["day", "week", "month", "year"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -971,10 +897,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -1025,16 +948,12 @@
{
"description": "fetch_content(api_key: Message) - **Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results.",
"name": "TavilySearchComponent-fetch_content",
"tags": [
"TavilySearchComponent-fetch_content"
]
"tags": ["TavilySearchComponent-fetch_content"]
},
{
"description": "fetch_content_text(api_key: Message) - **Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results.",
"name": "TavilySearchComponent-fetch_content_text",
"tags": [
"TavilySearchComponent-fetch_content_text"
]
"tags": ["TavilySearchComponent-fetch_content_text"]
}
]
},
@ -1047,10 +966,7 @@
"dynamic": false,
"info": "The category of the search.",
"name": "topic",
"options": [
"general",
"news"
],
"options": ["general", "news"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1086,9 +1002,7 @@
"display_name": "AgentQL Query Data",
"id": "AgentQL-FEfZe",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1125,9 +1039,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -1140,9 +1052,7 @@
"display_name": "API Key",
"dynamic": false,
"info": "Your AgentQL API key from dev.agentql.com",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -1234,10 +1144,7 @@
"dynamic": false,
"info": "'standard' uses deep data analysis, while 'fast' trades some depth of analysis for speed.",
"name": "mode",
"options": [
"fast",
"standard"
],
"options": ["fast", "standard"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1255,9 +1162,7 @@
"display_name": "Prompt",
"dynamic": false,
"info": "A Natural Language description of the data to extract from the page. Alternative to AgentQL query.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1280,9 +1185,7 @@
"display_name": "AgentQL Query",
"dynamic": false,
"info": "The AgentQL query to execute. Learn more at https://docs.agentql.com/agentql-query or use a prompt.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1341,10 +1244,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -1428,9 +1328,7 @@
"description": "build_output(api_key: Message, url: Message) - Extracts structured data from a web page using an AgentQL query or a Natural Language description.",
"name": "AgentQL-build_output",
"status": true,
"tags": [
"AgentQL-build_output"
]
"tags": ["AgentQL-build_output"]
}
]
},
@ -1440,9 +1338,7 @@
"display_name": "URL",
"dynamic": false,
"info": "The URL of the public web page you want to extract data from.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1504,9 +1400,7 @@
"data": {
"id": "Agent-C2lNW",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1557,9 +1451,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1590,9 +1482,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1674,9 +1564,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1729,9 +1617,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1830,9 +1716,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"list_add_label": "Add More",
"name": "memory",
@ -1937,10 +1821,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1978,11 +1859,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1999,9 +1876,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2022,9 +1897,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2045,9 +1918,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2097,9 +1968,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2139,9 +2008,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"list_add_label": "Add More",
"name": "tools",
@ -2312,8 +2179,5 @@
"is_component": false,
"last_tested_version": "1.2.0",
"name": "Price Deal Finder",
"tags": [
"web-scraping",
"agents"
]
}
"tags": ["web-scraping", "agents"]
}

View file

@ -9,17 +9,12 @@
"dataType": "ChatInput",
"id": "ChatInput-ODlE4",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Prompt-0159s",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -38,16 +33,12 @@
"dataType": "Prompt",
"id": "Prompt-Lj4w8",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-gNoaI",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -66,17 +57,12 @@
"dataType": "Agent",
"id": "Agent-gNoaI",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "search_results",
"id": "Prompt-0159s",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -94,16 +80,12 @@
"dataType": "TavilySearchComponent",
"id": "TavilySearchComponent-dk7WW",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-gNoaI",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -120,16 +102,12 @@
"dataType": "Prompt",
"id": "Prompt-5lo5d",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-1sVXW",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -146,16 +124,12 @@
"dataType": "ChatInput",
"id": "ChatInput-ODlE4",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-1sVXW",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -172,17 +146,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-1sVXW",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "previous_response",
"id": "Prompt-Lj4w8",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -199,16 +168,12 @@
"dataType": "Prompt",
"id": "Prompt-0159s",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-Rg8Kn",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -225,16 +190,12 @@
"dataType": "Prompt",
"id": "Prompt-4HHcE",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-Rg8Kn",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -250,18 +211,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-Rg8Kn",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-SIV0E",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "other"
}
},
@ -279,23 +234,17 @@
"display_name": "Prompt",
"id": "Prompt-Lj4w8",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
"template": [
"previous_response"
]
"template": ["previous_response"]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -311,9 +260,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -346,10 +293,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -385,9 +329,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -429,9 +371,7 @@
"data": {
"id": "ChatInput-ODlE4",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -467,9 +407,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -482,9 +420,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -503,9 +439,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -534,7 +468,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -573,6 +507,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -606,10 +541,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -624,9 +556,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -645,9 +575,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -682,9 +610,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -726,24 +652,17 @@
"display_name": "Prompt",
"id": "Prompt-0159s",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
"template": [
"search_results",
"input_value"
]
"template": ["search_results", "input_value"]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -759,9 +678,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -794,10 +711,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -817,10 +731,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -856,9 +767,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -939,9 +848,7 @@
"display_name": "Agent",
"id": "Agent-gNoaI",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -992,9 +899,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1023,9 +928,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1076,9 +979,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1129,9 +1030,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -1223,9 +1122,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"name": "memory",
"placeholder": "",
@ -1319,10 +1216,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"placeholder": "",
"required": false,
"show": true,
@ -1356,11 +1250,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"placeholder": "",
"required": false,
"show": true,
@ -1376,9 +1266,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -1398,9 +1286,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -1420,9 +1306,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1459,9 +1343,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1500,9 +1382,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"name": "tools",
"placeholder": "",
@ -1559,9 +1439,7 @@
"display_name": "Prompt",
"id": "Prompt-5lo5d",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -1571,9 +1449,7 @@
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -1589,9 +1465,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1640,9 +1514,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -1686,9 +1558,7 @@
"display_name": "Prompt",
"id": "Prompt-4HHcE",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -1698,9 +1568,7 @@
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -1716,9 +1584,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1767,9 +1633,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -1843,10 +1707,7 @@
"data": {
"id": "TavilySearchComponent-dk7WW",
"node": {
"base_classes": [
"Data",
"Message"
],
"base_classes": ["Data", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1881,9 +1742,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -1896,9 +1755,7 @@
"display_name": "Tavily API Key",
"dynamic": false,
"info": "Your Tavily API Key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -1987,9 +1844,7 @@
"display_name": "Search Query",
"dynamic": false,
"info": "The search query you want to execute with Tavily.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2013,10 +1868,7 @@
"dynamic": false,
"info": "The depth of the search.",
"name": "search_depth",
"options": [
"basic",
"advanced"
],
"options": ["basic", "advanced"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -2036,12 +1888,7 @@
"dynamic": false,
"info": "The time range back from the current date to include in the search results.",
"name": "time_range",
"options": [
"day",
"week",
"month",
"year"
],
"options": ["day", "week", "month", "year"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -2076,10 +1923,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -2133,16 +1977,12 @@
{
"description": "fetch_content(api_key: Message) - **Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results.",
"name": "TavilySearchComponent-fetch_content",
"tags": [
"TavilySearchComponent-fetch_content"
]
"tags": ["TavilySearchComponent-fetch_content"]
},
{
"description": "fetch_content_text(api_key: Message) - **Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results.",
"name": "TavilySearchComponent-fetch_content_text",
"tags": [
"TavilySearchComponent-fetch_content_text"
]
"tags": ["TavilySearchComponent-fetch_content_text"]
}
]
},
@ -2155,10 +1995,7 @@
"dynamic": false,
"info": "The category of the search.",
"name": "topic",
"options": [
"general",
"news"
],
"options": ["general", "news"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -2192,10 +2029,7 @@
"data": {
"id": "OpenAIModel-1sVXW",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -2234,9 +2068,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -2245,14 +2077,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -2266,9 +2094,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -2303,9 +2129,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2487,9 +2311,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2574,10 +2396,7 @@
"data": {
"id": "OpenAIModel-Rg8Kn",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -2616,9 +2435,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -2627,14 +2444,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -2648,9 +2461,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -2685,9 +2496,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2869,9 +2678,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2956,9 +2763,7 @@
"data": {
"id": "ChatOutput-SIV0E",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "outputs",
"conditional_paths": [],
@ -2995,9 +2800,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -3011,9 +2814,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3034,9 +2835,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3093,9 +2892,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3116,11 +2913,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"name": "input_value",
@ -3141,10 +2934,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -3161,9 +2951,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3184,9 +2972,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3225,9 +3011,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3275,8 +3059,5 @@
"is_component": false,
"last_tested_version": "1.1.5",
"name": "Research Agent",
"tags": [
"assistants",
"agents"
]
}
"tags": ["assistants", "agents"]
}

View file

@ -9,16 +9,12 @@
"dataType": "ScrapeGraphSearchApi",
"id": "ScrapeGraphSearchApi-4qF2k",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-CBAEF",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -36,16 +32,12 @@
"dataType": "ChatInput",
"id": "ChatInput-8hLIp",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-CBAEF",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -63,18 +55,12 @@
"dataType": "Agent",
"id": "Agent-CBAEF",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-9EKQJ",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "other"
}
},
@ -90,9 +76,7 @@
"data": {
"id": "ScrapeGraphSearchApi-4qF2k",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -100,10 +84,7 @@
"display_name": "ScrapeGraphSearchApi",
"documentation": "https://docs.scrapegraphai.com/introduction",
"edited": false,
"field_order": [
"api_key",
"user_prompt"
],
"field_order": ["api_key", "user_prompt"],
"frozen": false,
"icon": "ScrapeGraph",
"legacy": false,
@ -122,9 +103,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -137,9 +116,7 @@
"display_name": "ScrapeGraph API Key",
"dynamic": false,
"info": "The API key to use ScrapeGraph API.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -193,10 +170,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -250,9 +224,7 @@
{
"description": "search(api_key: Message) - ScrapeGraph Search API.\n Given a search prompt, it will return search results using ScrapeGraph's search functionality.\n More info at https://docs.scrapegraphai.com/services/searchscraper",
"name": "ScrapeGraphSearchApi-search",
"tags": [
"ScrapeGraphSearchApi-search"
]
"tags": ["ScrapeGraphSearchApi-search"]
}
]
},
@ -262,9 +234,7 @@
"display_name": "Search Prompt",
"dynamic": false,
"info": "The search prompt to use.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -302,9 +272,7 @@
"data": {
"id": "Agent-CBAEF",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -356,9 +324,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -389,9 +355,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -445,9 +409,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -500,9 +462,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -601,9 +561,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"list_add_label": "Add More",
"name": "memory",
@ -708,10 +666,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -749,11 +704,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -770,9 +721,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -793,9 +742,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -816,9 +763,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -868,9 +813,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -910,9 +853,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"list_add_label": "Add More",
"name": "tools",
@ -965,9 +906,7 @@
"data": {
"id": "ChatInput-8hLIp",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1002,9 +941,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1017,9 +954,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1040,9 +975,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1073,7 +1006,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -1113,6 +1046,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -1149,10 +1083,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1169,9 +1100,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1192,9 +1121,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1233,9 +1160,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1273,9 +1198,7 @@
"data": {
"id": "ChatOutput-9EKQJ",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1311,9 +1234,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1326,9 +1247,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1349,9 +1268,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1408,9 +1325,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1431,11 +1346,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"name": "input_value",
@ -1456,10 +1367,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1476,9 +1384,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1499,9 +1405,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1540,9 +1444,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1673,9 +1575,5 @@
"is_component": false,
"last_tested_version": "1.1.5",
"name": "Search agent",
"tags": [
"web-scraping",
"agents",
"assistants"
]
}
"tags": ["web-scraping", "agents", "assistants"]
}

View file

@ -9,16 +9,12 @@
"dataType": "ChatInput",
"id": "ChatInput-5rifq",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-eACmP",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -37,18 +33,12 @@
"dataType": "Agent",
"id": "Agent-eACmP",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-jYRjS",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -67,16 +57,12 @@
"dataType": "CalculatorComponent",
"id": "CalculatorComponent-GTSkO",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-eACmP",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -94,16 +80,12 @@
"dataType": "URL",
"id": "URL-BvxUK",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-eACmP",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -121,9 +103,7 @@
"display_name": "Agent",
"id": "Agent-eACmP",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -174,9 +154,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -205,9 +183,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -258,9 +234,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -311,9 +285,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -405,9 +377,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"name": "memory",
"placeholder": "",
@ -501,10 +471,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"placeholder": "",
"required": false,
"show": true,
@ -538,11 +505,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"placeholder": "",
"required": false,
"show": true,
@ -558,9 +521,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -580,9 +541,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -602,9 +561,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -641,9 +598,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -682,9 +637,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"name": "tools",
"placeholder": "",
@ -733,9 +686,7 @@
"data": {
"id": "ChatInput-5rifq",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -769,9 +720,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -784,9 +733,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -806,9 +753,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -838,7 +783,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -877,6 +822,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -911,10 +857,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -930,9 +873,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -952,9 +893,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -990,9 +929,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -1030,9 +967,7 @@
"display_name": "Chat Output",
"id": "ChatOutput-jYRjS",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1066,9 +1001,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1081,9 +1014,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -1103,9 +1034,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -1161,9 +1090,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -1183,11 +1110,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -1208,10 +1131,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -1227,9 +1147,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -1249,9 +1167,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -1287,9 +1203,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -1326,11 +1240,7 @@
"display_name": "URL",
"id": "URL-BvxUK",
"node": {
"base_classes": [
"Data",
"DataFrame",
"Message"
],
"base_classes": ["Data", "DataFrame", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1362,9 +1272,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -1416,11 +1324,7 @@
"dynamic": false,
"info": "Output Format. Use 'Text' to extract text from the HTML, 'Raw HTML' for the raw HTML content, or 'JSON' to extract JSON from the HTML.",
"name": "format",
"options": [
"Text",
"Raw HTML",
"JSON"
],
"options": ["Text", "Raw HTML", "JSON"],
"options_metadata": [],
"placeholder": "",
"real_time_refresh": true,
@ -1476,10 +1380,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -1536,23 +1437,17 @@
{
"description": "fetch_content() - Load and retrieve data from specified URLs. Supports output in plain text, raw HTML, or JSON, with options for cleaning and separating multiple outputs.",
"name": "URL-fetch_content",
"tags": [
"URL-fetch_content"
]
"tags": ["URL-fetch_content"]
},
{
"description": "fetch_content_text() - Load and retrieve data from specified URLs. Supports output in plain text, raw HTML, or JSON, with options for cleaning and separating multiple outputs.",
"name": "URL-fetch_content_text",
"tags": [
"URL-fetch_content_text"
]
"tags": ["URL-fetch_content_text"]
},
{
"description": "as_dataframe() - Load and retrieve data from specified URLs. Supports output in plain text, raw HTML, or JSON, with options for cleaning and separating multiple outputs.",
"name": "URL-as_dataframe",
"tags": [
"URL-as_dataframe"
]
"tags": ["URL-as_dataframe"]
}
]
},
@ -1562,9 +1457,7 @@
"display_name": "URLs",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": true,
"list_add_label": "Add URL",
"load_from_db": false,
@ -1652,9 +1545,7 @@
"data": {
"id": "CalculatorComponent-GTSkO",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"category": "tools",
"conditional_paths": [],
@ -1663,9 +1554,7 @@
"display_name": "Calculator",
"documentation": "",
"edited": false,
"field_order": [
"expression"
],
"field_order": ["expression"],
"frozen": false,
"icon": "calculator",
"key": "CalculatorComponent",
@ -1685,9 +1574,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -1719,9 +1606,7 @@
"display_name": "Expression",
"dynamic": false,
"info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1761,10 +1646,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -1821,9 +1703,7 @@
{
"description": "evaluate_expression() - Perform basic arithmetic operations on a given expression.",
"name": "None-evaluate_expression",
"tags": [
"None-evaluate_expression"
]
"tags": ["None-evaluate_expression"]
}
]
}
@ -1859,8 +1739,5 @@
"is_component": false,
"last_tested_version": "1.1.1",
"name": "Simple Agent",
"tags": [
"assistants",
"agents"
]
}
"tags": ["assistants", "agents"]
}

View file

@ -7,16 +7,12 @@
"dataType": "ApifyActors",
"id": "ApifyActors-B3OZd",
"name": "tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-KXbzb",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -32,16 +28,12 @@
"dataType": "ApifyActors",
"id": "ApifyActors-0TUqW",
"name": "tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-KXbzb",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -57,16 +49,12 @@
"dataType": "ChatInput",
"id": "ChatInput-3C2ac",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-KXbzb",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -82,18 +70,12 @@
"dataType": "Agent",
"id": "Agent-KXbzb",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-lTzgN",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "other"
}
},
@ -109,10 +91,7 @@
"data": {
"id": "ApifyActors-0TUqW",
"node": {
"base_classes": [
"Data",
"Tool"
],
"base_classes": ["Data", "Tool"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -144,9 +123,7 @@
"required_inputs": null,
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
},
{
@ -159,9 +136,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -193,9 +168,7 @@
"display_name": "Apify Token",
"dynamic": false,
"info": "The API token for the Apify account.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "apify_token",
"password": true,
@ -230,9 +203,7 @@
"display_name": "Output fields",
"dynamic": false,
"info": "Fields to extract from the dataset, split by commas. Other fields will be ignored. Dots in nested structures will be replaced by underscores. Sample input: 'text, metadata.title'. Sample output: {'text': 'page content here', 'metadata_title': 'page title here'}. For example, for the 'apify/website-content-crawler' Actor, you can extract the 'markdown' field, which is the content of the website in markdown format.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -272,9 +243,7 @@
"display_name": "Run input",
"dynamic": false,
"info": "The JSON input for the Actor run. For example for the \"apify/website-content-crawler\" Actor: {\"startUrls\":[{\"url\":\"https://docs.apify.com/academy/web-scraping-for-beginners\"}],\"maxCrawlDepth\":0}",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -313,10 +282,7 @@
"data": {
"id": "ApifyActors-B3OZd",
"node": {
"base_classes": [
"Data",
"Tool"
],
"base_classes": ["Data", "Tool"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -348,9 +314,7 @@
"required_inputs": null,
"selected": "Data",
"tool_mode": true,
"types": [
"Data"
],
"types": ["Data"],
"value": "__UNDEFINED__"
},
{
@ -363,9 +327,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -397,9 +359,7 @@
"display_name": "Apify Token",
"dynamic": false,
"info": "The API token for the Apify account.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "apify_token",
"password": true,
@ -434,9 +394,7 @@
"display_name": "Output fields",
"dynamic": false,
"info": "Fields to extract from the dataset, split by commas. Other fields will be ignored. Dots in nested structures will be replaced by underscores. Sample input: 'text, metadata.title'. Sample output: {'text': 'page content here', 'metadata_title': 'page title here'}. For example, for the 'apify/website-content-crawler' Actor, you can extract the 'markdown' field, which is the content of the website in markdown format.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -476,9 +434,7 @@
"display_name": "Run input",
"dynamic": false,
"info": "The JSON input for the Actor run. For example for the \"apify/website-content-crawler\" Actor: {\"startUrls\":[{\"url\":\"https://docs.apify.com/academy/web-scraping-for-beginners\"}],\"maxCrawlDepth\":0}",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -601,9 +557,7 @@
"data": {
"id": "ChatInput-3C2ac",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -638,9 +592,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -653,9 +605,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -676,9 +626,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -709,7 +657,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -749,6 +697,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -785,10 +734,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -805,9 +751,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -828,9 +772,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -869,9 +811,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -909,9 +849,7 @@
"data": {
"id": "ChatOutput-lTzgN",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -947,9 +885,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -962,9 +898,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -985,9 +919,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1044,9 +976,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1067,11 +997,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"name": "input_value",
@ -1092,10 +1018,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1112,9 +1035,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1135,9 +1056,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1176,9 +1095,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1242,9 +1159,7 @@
"data": {
"id": "Agent-KXbzb",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "agents",
"conditional_paths": [],
@ -1297,9 +1212,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1332,9 +1245,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1416,9 +1327,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -1471,9 +1380,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1572,9 +1479,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"list_add_label": "Add More",
"name": "memory",
@ -1679,10 +1584,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1720,11 +1622,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1741,9 +1639,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1764,9 +1660,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1788,9 +1682,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1841,9 +1733,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1883,9 +1773,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"list_add_label": "Add More",
"name": "tools",
@ -1947,8 +1835,5 @@
"is_component": false,
"last_tested_version": "1.2.0",
"name": "Social Media Agent",
"tags": [
"agent",
"assistants"
]
}
"tags": ["agent", "assistants"]
}

View file

@ -9,18 +9,12 @@
"dataType": "Agent",
"id": "Agent-PKpSO",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-8np0X",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -39,16 +33,12 @@
"dataType": "Agent",
"id": "Agent-zOYup",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-PKpSO",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -67,16 +57,12 @@
"dataType": "Agent",
"id": "Agent-7K58a",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-zOYup",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -95,16 +81,12 @@
"dataType": "ChatInput",
"id": "ChatInput-iZoDa",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-7K58a",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -123,16 +105,12 @@
"dataType": "URL",
"id": "URL-j9slU",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-zOYup",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -151,16 +129,12 @@
"dataType": "CalculatorComponent",
"id": "CalculatorComponent-L3y5A",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-PKpSO",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -179,16 +153,12 @@
"dataType": "SearchComponent",
"id": "SearchComponent-8lQPB",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-7K58a",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -205,9 +175,7 @@
"data": {
"id": "ChatInput-iZoDa",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -238,9 +206,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -253,9 +219,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -274,9 +238,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -305,7 +267,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -344,6 +306,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -377,10 +340,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -395,9 +355,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -416,9 +374,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -453,9 +409,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -497,9 +451,7 @@
"display_name": "Chat Output",
"id": "ChatOutput-8np0X",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -533,9 +485,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -548,9 +498,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -570,9 +518,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -628,9 +574,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -650,11 +594,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -675,10 +615,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -694,9 +631,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -716,9 +651,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -754,9 +687,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -800,9 +731,7 @@
"display_name": "City Selection Agent",
"id": "Agent-7K58a",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -853,9 +782,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -884,9 +811,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -937,9 +862,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -990,9 +913,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -1084,9 +1005,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"name": "memory",
"placeholder": "",
@ -1180,10 +1099,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"placeholder": "",
"required": false,
"show": true,
@ -1217,11 +1133,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"placeholder": "",
"required": false,
"show": true,
@ -1237,9 +1149,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -1259,9 +1169,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -1281,9 +1189,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1320,9 +1226,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1361,9 +1265,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"name": "tools",
"placeholder": "",
@ -1420,9 +1322,7 @@
"display_name": "Local Expert Agent",
"id": "Agent-zOYup",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1473,9 +1373,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1504,9 +1402,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1557,9 +1453,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1610,9 +1504,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -1704,9 +1596,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"name": "memory",
"placeholder": "",
@ -1800,10 +1690,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"placeholder": "",
"required": false,
"show": true,
@ -1837,11 +1724,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"placeholder": "",
"required": false,
"show": true,
@ -1857,9 +1740,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -1879,9 +1760,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -1901,9 +1780,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1940,9 +1817,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1981,9 +1856,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"name": "tools",
"placeholder": "",
@ -2040,9 +1913,7 @@
"display_name": "Travel Concierge Agent",
"id": "Agent-PKpSO",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -2093,9 +1964,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -2124,9 +1993,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -2177,9 +2044,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -2230,9 +2095,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -2324,9 +2187,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"name": "memory",
"placeholder": "",
@ -2420,10 +2281,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"placeholder": "",
"required": false,
"show": true,
@ -2457,11 +2315,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"placeholder": "",
"required": false,
"show": true,
@ -2477,9 +2331,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -2499,9 +2351,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -2521,9 +2371,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -2560,9 +2408,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -2601,9 +2447,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"name": "tools",
"placeholder": "",
@ -2839,11 +2683,7 @@
"display_name": "URL",
"id": "URL-j9slU",
"node": {
"base_classes": [
"Data",
"DataFrame",
"Message"
],
"base_classes": ["Data", "DataFrame", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -2874,9 +2714,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -2928,11 +2766,7 @@
"dynamic": false,
"info": "Output Format. Use 'Text' to extract text from the HTML, 'Raw HTML' for the raw HTML content, or 'JSON' to extract JSON from the HTML.",
"name": "format",
"options": [
"Text",
"Raw HTML",
"JSON"
],
"options": ["Text", "Raw HTML", "JSON"],
"options_metadata": [],
"placeholder": "",
"real_time_refresh": true,
@ -2988,10 +2822,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -3048,23 +2879,17 @@
{
"description": "fetch_content() - Load and retrieve data from specified URLs. Supports output in plain text, raw HTML, or JSON, with options for cleaning and separating multiple outputs.",
"name": "URL-fetch_content",
"tags": [
"URL-fetch_content"
]
"tags": ["URL-fetch_content"]
},
{
"description": "fetch_content_text() - Load and retrieve data from specified URLs. Supports output in plain text, raw HTML, or JSON, with options for cleaning and separating multiple outputs.",
"name": "URL-fetch_content_text",
"tags": [
"URL-fetch_content_text"
]
"tags": ["URL-fetch_content_text"]
},
{
"description": "as_dataframe() - Load and retrieve data from specified URLs. Supports output in plain text, raw HTML, or JSON, with options for cleaning and separating multiple outputs.",
"name": "URL-as_dataframe",
"tags": [
"URL-as_dataframe"
]
"tags": ["URL-as_dataframe"]
}
]
},
@ -3074,9 +2899,7 @@
"display_name": "URLs",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": true,
"list_add_label": "Add URL",
"load_from_db": false,
@ -3114,9 +2937,7 @@
"data": {
"id": "CalculatorComponent-L3y5A",
"node": {
"base_classes": [
"Data"
],
"base_classes": ["Data"],
"beta": false,
"category": "tools",
"conditional_paths": [],
@ -3125,9 +2946,7 @@
"display_name": "Calculator",
"documentation": "",
"edited": false,
"field_order": [
"expression"
],
"field_order": ["expression"],
"frozen": false,
"icon": "calculator",
"key": "CalculatorComponent",
@ -3146,9 +2965,7 @@
"name": "component_as_tool",
"required_inputs": null,
"selected": "Tool",
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -3180,9 +2997,7 @@
"display_name": "Expression",
"dynamic": false,
"info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3222,10 +3037,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -3276,9 +3088,7 @@
{
"description": "evaluate_expression() - Perform basic arithmetic operations on a given expression.",
"name": "CalculatorComponent-evaluate_expression",
"tags": [
"CalculatorComponent-evaluate_expression"
]
"tags": ["CalculatorComponent-evaluate_expression"]
}
]
}
@ -3307,11 +3117,7 @@
"display_name": "Search API",
"id": "SearchComponent-8lQPB",
"node": {
"base_classes": [
"Data",
"DataFrame",
"Message"
],
"base_classes": ["Data", "DataFrame", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -3345,9 +3151,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -3360,9 +3164,7 @@
"display_name": "SearchAPI API Key",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": false,
"name": "api_key",
"password": true,
@ -3400,11 +3202,7 @@
"dynamic": false,
"info": "",
"name": "engine",
"options": [
"google",
"bing",
"duckduckgo"
],
"options": ["google", "bing", "duckduckgo"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -3422,9 +3220,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3519,10 +3315,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -3593,25 +3386,19 @@
"description": "fetch_content(api_key: Message) - Call the searchapi.io API with result limiting",
"name": "SearchComponent-fetch_content",
"status": true,
"tags": [
"SearchComponent-fetch_content"
]
"tags": ["SearchComponent-fetch_content"]
},
{
"description": "fetch_content_text(api_key: Message) - Call the searchapi.io API with result limiting",
"name": "SearchComponent-fetch_content_text",
"status": true,
"tags": [
"SearchComponent-fetch_content_text"
]
"tags": ["SearchComponent-fetch_content_text"]
},
{
"description": "as_dataframe(api_key: Message) - Call the searchapi.io API with result limiting",
"name": "SearchComponent-as_dataframe",
"status": true,
"tags": [
"SearchComponent-as_dataframe"
]
"tags": ["SearchComponent-as_dataframe"]
}
]
}
@ -3649,8 +3436,5 @@
"is_component": false,
"last_tested_version": "1.0.19.post2",
"name": "Travel Planning Agents",
"tags": [
"agents",
"openai"
]
}
"tags": ["agents", "openai"]
}

View file

@ -9,17 +9,12 @@
"dataType": "TextInput",
"id": "TextInput-eClq5",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "CONTENT_GUIDELINES",
"id": "Prompt-AWZtN",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -38,17 +33,12 @@
"dataType": "TextInput",
"id": "TextInput-IpoG7",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "OUTPUT_FORMAT",
"id": "Prompt-AWZtN",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -67,17 +57,12 @@
"dataType": "TextInput",
"id": "TextInput-npraC",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "OUTPUT_LANGUAGE",
"id": "Prompt-AWZtN",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -96,17 +81,12 @@
"dataType": "TextInput",
"id": "TextInput-EZaR7",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "PROFILE_DETAILS",
"id": "Prompt-AWZtN",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -125,17 +105,12 @@
"dataType": "TextInput",
"id": "TextInput-fKGcs",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "PROFILE_TYPE",
"id": "Prompt-AWZtN",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -154,17 +129,12 @@
"dataType": "TextInput",
"id": "TextInput-92vEK",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "TONE_AND_STYLE",
"id": "Prompt-AWZtN",
"inputTypes": [
"Message",
"Text"
],
"inputTypes": ["Message", "Text"],
"type": "str"
}
},
@ -182,16 +152,12 @@
"dataType": "ChatInput",
"id": "ChatInput-ECcN8",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIModel-p0R9m",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -209,16 +175,12 @@
"dataType": "Prompt",
"id": "Prompt-AWZtN",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "system_message",
"id": "OpenAIModel-p0R9m",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -236,18 +198,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-p0R9m",
"name": "text_output",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-0jDYx",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -264,9 +220,7 @@
"data": {
"id": "ChatInput-ECcN8",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -300,9 +254,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -315,9 +267,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -336,9 +286,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -367,7 +315,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -406,6 +354,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -439,10 +388,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -457,9 +403,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -478,9 +422,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -515,9 +457,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -557,9 +497,7 @@
"data": {
"id": "TextInput-eClq5",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -567,9 +505,7 @@
"display_name": "Content Guidelines",
"documentation": "",
"edited": false,
"field_order": [
"input_value"
],
"field_order": ["input_value"],
"frozen": false,
"icon": "type",
"legacy": false,
@ -585,9 +521,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -618,9 +552,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Text to be passed as input.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -663,9 +595,7 @@
"display_name": "Chat Output",
"id": "ChatOutput-0jDYx",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -699,9 +629,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -714,9 +642,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "background_color",
@ -736,9 +662,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "chat_icon",
@ -794,9 +718,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "data_template",
@ -816,11 +738,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"load_from_db": false,
"name": "input_value",
@ -841,10 +759,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"placeholder": "",
"required": false,
"show": true,
@ -860,9 +775,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "sender_name",
@ -882,9 +795,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "session_id",
@ -920,9 +831,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "text_color",
@ -964,9 +873,7 @@
"data": {
"id": "TextInput-IpoG7",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -974,9 +881,7 @@
"display_name": "Output Format",
"documentation": "",
"edited": false,
"field_order": [
"input_value"
],
"field_order": ["input_value"],
"frozen": false,
"icon": "type",
"legacy": false,
@ -992,9 +897,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1025,9 +928,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Text to be passed as input.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1068,9 +969,7 @@
"data": {
"id": "TextInput-npraC",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1078,9 +977,7 @@
"display_name": "Output Language",
"documentation": "",
"edited": false,
"field_order": [
"input_value"
],
"field_order": ["input_value"],
"frozen": false,
"icon": "type",
"legacy": false,
@ -1096,9 +993,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1129,9 +1024,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Text to be passed as input.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1172,9 +1065,7 @@
"data": {
"id": "TextInput-EZaR7",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1182,9 +1073,7 @@
"display_name": "Profile Details",
"documentation": "",
"edited": false,
"field_order": [
"input_value"
],
"field_order": ["input_value"],
"frozen": false,
"icon": "type",
"legacy": false,
@ -1200,9 +1089,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1233,9 +1120,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Text to be passed as input.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1276,9 +1161,7 @@
"data": {
"id": "TextInput-92vEK",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1286,9 +1169,7 @@
"display_name": "Tone And Style",
"documentation": "",
"edited": false,
"field_order": [
"input_value"
],
"field_order": ["input_value"],
"frozen": false,
"icon": "type",
"legacy": false,
@ -1304,9 +1185,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1337,9 +1216,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Text to be passed as input.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1380,9 +1257,7 @@
"data": {
"id": "TextInput-fKGcs",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -1390,9 +1265,7 @@
"display_name": "Profile Type",
"documentation": "",
"edited": false,
"field_order": [
"input_value"
],
"field_order": ["input_value"],
"frozen": false,
"icon": "type",
"legacy": false,
@ -1408,9 +1281,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1441,9 +1312,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Text to be passed as input.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1523,9 +1392,7 @@
"display_name": "Prompt",
"id": "Prompt-AWZtN",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
@ -1542,9 +1409,7 @@
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"field_order": ["template"],
"frozen": false,
"icon": "prompts",
"legacy": false,
@ -1560,9 +1425,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1576,10 +1439,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1599,10 +1459,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1622,10 +1479,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1645,10 +1499,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1668,10 +1519,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1691,10 +1539,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message",
"Text"
],
"input_types": ["Message", "Text"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -1749,9 +1594,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
@ -1793,10 +1636,7 @@
"data": {
"id": "OpenAIModel-p0R9m",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -1835,9 +1675,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -1846,14 +1684,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -1867,9 +1701,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1904,9 +1736,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2088,9 +1918,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2186,8 +2014,5 @@
"is_component": false,
"last_tested_version": "1.0.19.post2",
"name": "Twitter Thread Generator",
"tags": [
"chatbots",
"content-generation"
]
}
"tags": ["chatbots", "content-generation"]
}

View file

@ -9,16 +9,12 @@
"dataType": "YouTubeCommentsComponent",
"id": "YouTubeCommentsComponent-5DgSV",
"name": "comments",
"output_types": [
"DataFrame"
]
"output_types": ["DataFrame"]
},
"targetHandle": {
"fieldName": "df",
"id": "BatchRunComponent-s2QTv",
"inputTypes": [
"DataFrame"
],
"inputTypes": ["DataFrame"],
"type": "other"
}
},
@ -37,16 +33,12 @@
"dataType": "OpenAIModel",
"id": "OpenAIModel-ZVATe",
"name": "model_output",
"output_types": [
"LanguageModel"
]
"output_types": ["LanguageModel"]
},
"targetHandle": {
"fieldName": "model",
"id": "BatchRunComponent-s2QTv",
"inputTypes": [
"LanguageModel"
],
"inputTypes": ["LanguageModel"],
"type": "other"
}
},
@ -65,16 +57,12 @@
"dataType": "BatchRunComponent",
"id": "BatchRunComponent-s2QTv",
"name": "batch_results",
"output_types": [
"DataFrame"
]
"output_types": ["DataFrame"]
},
"targetHandle": {
"fieldName": "df",
"id": "ParseDataFrame-pJJ7Z",
"inputTypes": [
"DataFrame"
],
"inputTypes": ["DataFrame"],
"type": "other"
}
},
@ -93,16 +81,12 @@
"dataType": "ParseDataFrame",
"id": "ParseDataFrame-pJJ7Z",
"name": "text",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "analysis",
"id": "Prompt-Kn7x9",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -121,16 +105,12 @@
"dataType": "Prompt",
"id": "Prompt-Kn7x9",
"name": "prompt",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-Px7Zt",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -149,18 +129,12 @@
"dataType": "Agent",
"id": "Agent-Px7Zt",
"name": "response",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-W5R97",
"inputTypes": [
"Data",
"DataFrame",
"Message"
],
"inputTypes": ["Data", "DataFrame", "Message"],
"type": "str"
}
},
@ -179,16 +153,12 @@
"dataType": "YouTubeTranscripts",
"id": "YouTubeTranscripts-ppAJD",
"name": "component_as_tool",
"output_types": [
"Tool"
]
"output_types": ["Tool"]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-Px7Zt",
"inputTypes": [
"Tool"
],
"inputTypes": ["Tool"],
"type": "other"
}
},
@ -207,16 +177,12 @@
"dataType": "ChatInput",
"id": "ChatInput-Aprv5",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "input_text",
"id": "ConditionalRouter-NmX80",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -235,16 +201,12 @@
"dataType": "ChatInput",
"id": "ChatInput-Aprv5",
"name": "message",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "message",
"id": "ConditionalRouter-NmX80",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -263,16 +225,12 @@
"dataType": "ConditionalRouter",
"id": "ConditionalRouter-NmX80",
"name": "true_result",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "video_url",
"id": "YouTubeCommentsComponent-5DgSV",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -291,16 +249,12 @@
"dataType": "ConditionalRouter",
"id": "ConditionalRouter-NmX80",
"name": "true_result",
"output_types": [
"Message"
]
"output_types": ["Message"]
},
"targetHandle": {
"fieldName": "url",
"id": "Prompt-Kn7x9",
"inputTypes": [
"Message"
],
"inputTypes": ["Message"],
"type": "str"
}
},
@ -317,9 +271,7 @@
"data": {
"id": "BatchRunComponent-s2QTv",
"node": {
"base_classes": [
"DataFrame"
],
"base_classes": ["DataFrame"],
"beta": true,
"category": "helpers",
"conditional_paths": [],
@ -328,12 +280,7 @@
"display_name": "Batch Run",
"documentation": "",
"edited": false,
"field_order": [
"model",
"system_message",
"df",
"column_name"
],
"field_order": ["model", "system_message", "df", "column_name"],
"frozen": false,
"icon": "List",
"key": "BatchRunComponent",
@ -351,9 +298,7 @@
"name": "batch_results",
"selected": "DataFrame",
"tool_mode": true,
"types": [
"DataFrame"
],
"types": ["DataFrame"],
"value": "__UNDEFINED__"
}
],
@ -404,9 +349,7 @@
"display_name": "DataFrame",
"dynamic": false,
"info": "The DataFrame whose column (specified by 'column_name') we'll treat as text messages.",
"input_types": [
"DataFrame"
],
"input_types": ["DataFrame"],
"list": false,
"list_add_label": "Add More",
"name": "df",
@ -444,9 +387,7 @@
"display_name": "Language Model",
"dynamic": false,
"info": "Connect the 'Language Model' output from your LLM component here.",
"input_types": [
"LanguageModel"
],
"input_types": ["LanguageModel"],
"list": false,
"list_add_label": "Add More",
"name": "model",
@ -464,9 +405,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "Multi-line system instruction for all rows in the DataFrame.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -505,9 +444,7 @@
"data": {
"id": "YouTubeCommentsComponent-5DgSV",
"node": {
"base_classes": [
"DataFrame"
],
"base_classes": ["DataFrame"],
"beta": false,
"category": "youtube",
"conditional_paths": [],
@ -541,9 +478,7 @@
"name": "comments",
"selected": "DataFrame",
"tool_mode": true,
"types": [
"DataFrame"
],
"types": ["DataFrame"],
"value": "__UNDEFINED__"
}
],
@ -557,9 +492,7 @@
"display_name": "YouTube API Key",
"dynamic": false,
"info": "Your YouTube Data API key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -651,10 +584,7 @@
"dynamic": false,
"info": "Sort comments by time or relevance.",
"name": "sort_by",
"options": [
"time",
"relevance"
],
"options": ["time", "relevance"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -671,9 +601,7 @@
"display_name": "Video URL",
"dynamic": false,
"info": "The URL of the YouTube video to get comments from.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -711,10 +639,7 @@
"data": {
"id": "OpenAIModel-ZVATe",
"node": {
"base_classes": [
"LanguageModel",
"Message"
],
"base_classes": ["LanguageModel", "Message"],
"beta": false,
"category": "models",
"conditional_paths": [],
@ -754,9 +679,7 @@
"required_inputs": [],
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -765,14 +688,10 @@
"display_name": "Language Model",
"method": "build_model",
"name": "model_output",
"required_inputs": [
"api_key"
],
"required_inputs": ["api_key"],
"selected": "LanguageModel",
"tool_mode": true,
"types": [
"LanguageModel"
],
"types": ["LanguageModel"],
"value": "__UNDEFINED__"
}
],
@ -786,9 +705,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -823,9 +740,7 @@
"display_name": "Input",
"dynamic": false,
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1007,9 +922,7 @@
"display_name": "System Message",
"dynamic": false,
"info": "System message to pass to the model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1094,9 +1007,7 @@
"data": {
"id": "ParseDataFrame-pJJ7Z",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "processing",
"conditional_paths": [],
@ -1105,11 +1016,7 @@
"display_name": "Parse DataFrame",
"documentation": "",
"edited": false,
"field_order": [
"df",
"template",
"sep"
],
"field_order": ["df", "template", "sep"],
"frozen": false,
"icon": "braces",
"key": "ParseDataFrame",
@ -1127,9 +1034,7 @@
"name": "text",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1161,9 +1066,7 @@
"display_name": "DataFrame",
"dynamic": false,
"info": "The DataFrame to convert to text rows.",
"input_types": [
"DataFrame"
],
"input_types": ["DataFrame"],
"list": false,
"list_add_label": "Add More",
"name": "df",
@ -1202,9 +1105,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template for formatting each row. Use placeholders matching column names in the DataFrame, for example '{col1}', '{col2}'.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1243,9 +1144,7 @@
"data": {
"id": "Agent-Px7Zt",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "agents",
"conditional_paths": [],
@ -1297,9 +1196,7 @@
"name": "response",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1331,9 +1228,7 @@
"display_name": "Agent Description [Deprecated]",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically. This feature is deprecated and will be removed in future versions.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1387,9 +1282,7 @@
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"load_from_db": true,
"name": "api_key",
"password": true,
@ -1442,9 +1335,7 @@
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1543,9 +1434,7 @@
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"Memory"
],
"input_types": ["Memory"],
"list": false,
"list_add_label": "Add More",
"name": "memory",
@ -1650,10 +1539,7 @@
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"options": ["Ascending", "Descending"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1691,11 +1577,7 @@
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"options": ["Machine", "User", "Machine and User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -1712,9 +1594,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1735,9 +1615,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1758,9 +1636,7 @@
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1810,9 +1686,7 @@
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -1852,9 +1726,7 @@
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool"
],
"input_types": ["Tool"],
"list": true,
"list_add_label": "Add More",
"name": "tools",
@ -1907,26 +1779,18 @@
"data": {
"id": "Prompt-Kn7x9",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {
"template": [
"url",
"analysis"
]
"template": ["url", "analysis"]
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
"documentation": "",
"edited": false,
"error": null,
"field_order": [
"template",
"tool_placeholder"
],
"field_order": ["template", "tool_placeholder"],
"frozen": false,
"full_path": null,
"icon": "prompts",
@ -1948,9 +1812,7 @@
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -1965,9 +1827,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -2021,9 +1881,7 @@
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2046,9 +1904,7 @@
"fileTypes": [],
"file_path": "",
"info": "",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"load_from_db": false,
"multiline": true,
@ -2083,9 +1939,7 @@
"data": {
"id": "ChatOutput-W5R97",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "outputs",
"conditional_paths": [],
@ -2122,9 +1976,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -2138,9 +1990,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2161,9 +2011,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2220,9 +2068,7 @@
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2243,11 +2089,7 @@
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Data",
"DataFrame",
"Message"
],
"input_types": ["Data", "DataFrame", "Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2271,10 +2113,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -2291,9 +2130,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2314,9 +2151,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2355,9 +2190,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2395,11 +2228,7 @@
"data": {
"id": "YouTubeTranscripts-ppAJD",
"node": {
"base_classes": [
"Data",
"DataFrame",
"Message"
],
"base_classes": ["Data", "DataFrame", "Message"],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
@ -2407,11 +2236,7 @@
"display_name": "YouTube Transcripts",
"documentation": "",
"edited": false,
"field_order": [
"url",
"chunk_size_seconds",
"translation"
],
"field_order": ["url", "chunk_size_seconds", "translation"],
"frozen": false,
"icon": "YouTube",
"legacy": false,
@ -2429,9 +2254,7 @@
"required_inputs": null,
"selected": "Tool",
"tool_mode": true,
"types": [
"Tool"
],
"types": ["Tool"],
"value": "__UNDEFINED__"
}
],
@ -2499,10 +2322,7 @@
"description": "Modify tool names and descriptions to help agents understand when to use each tool.",
"field_parsers": {
"commands": "commands",
"name": [
"snake_case",
"no_blank"
]
"name": ["snake_case", "no_blank"]
},
"hide_options": true
},
@ -2556,23 +2376,17 @@
{
"description": "get_dataframe_output(url: Message) - Extracts spoken content from YouTube videos with multiple output options.",
"name": "YouTubeTranscripts-get_dataframe_output",
"tags": [
"YouTubeTranscripts-get_dataframe_output"
]
"tags": ["YouTubeTranscripts-get_dataframe_output"]
},
{
"description": "get_message_output(url: Message) - Extracts spoken content from YouTube videos with multiple output options.",
"name": "YouTubeTranscripts-get_message_output",
"tags": [
"YouTubeTranscripts-get_message_output"
]
"tags": ["YouTubeTranscripts-get_message_output"]
},
{
"description": "get_data_output(url: Message) - Extracts spoken content from YouTube videos with multiple output options.",
"name": "YouTubeTranscripts-get_data_output",
"tags": [
"YouTubeTranscripts-get_data_output"
]
"tags": ["YouTubeTranscripts-get_data_output"]
}
]
},
@ -2616,9 +2430,7 @@
"display_name": "Video URL",
"dynamic": false,
"info": "Enter the YouTube video URL to get transcripts from.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2657,9 +2469,7 @@
"data": {
"id": "ChatInput-Aprv5",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "inputs",
"conditional_paths": [],
@ -2696,9 +2506,7 @@
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -2712,9 +2520,7 @@
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2735,9 +2541,7 @@
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2768,7 +2572,7 @@
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
@ -2808,6 +2612,7 @@
"placeholder": "",
"required": false,
"show": true,
"temp_file": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
@ -2844,10 +2649,7 @@
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"options": ["Machine", "User"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -2864,9 +2666,7 @@
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2887,9 +2687,7 @@
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -2928,9 +2726,7 @@
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3055,9 +2851,7 @@
"data": {
"id": "ConditionalRouter-NmX80",
"node": {
"base_classes": [
"Message"
],
"base_classes": ["Message"],
"beta": false,
"category": "logic",
"conditional_paths": [],
@ -3092,9 +2886,7 @@
"name": "true_result",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
},
{
@ -3105,9 +2897,7 @@
"name": "false_result",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"types": ["Message"],
"value": "__UNDEFINED__"
}
],
@ -3160,10 +2950,7 @@
"dynamic": false,
"info": "The default route to take when max iterations are reached.",
"name": "default_route",
"options": [
"true_result",
"false_result"
],
"options": ["true_result", "false_result"],
"options_metadata": [],
"placeholder": "",
"required": false,
@ -3180,9 +2967,7 @@
"display_name": "Text Input",
"dynamic": false,
"info": "The primary text input for the operation.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3203,9 +2988,7 @@
"display_name": "Match Text",
"dynamic": false,
"info": "The text input to compare against.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3244,9 +3027,7 @@
"display_name": "Message",
"dynamic": false,
"info": "The message to pass through either route.",
"input_types": [
"Message"
],
"input_types": ["Message"],
"list": false,
"list_add_label": "Add More",
"load_from_db": false,
@ -3322,8 +3103,5 @@
"is_component": false,
"last_tested_version": "1.1.3",
"name": "Youtube Analysis",
"tags": [
"agents",
"assistants"
]
}
"tags": ["agents", "assistants"]
}

View file

@ -139,6 +139,7 @@ class DatabaseLoadMixin(BaseModel):
class FileMixin(BaseModel):
file_path: list[str] | str | None = Field(default="")
file_types: list[str] = Field(default=[], alias="fileTypes")
temp_file: bool = Field(default=False)
@field_validator("file_path")
@classmethod

View file

@ -23,7 +23,7 @@ from pydantic_core import PydanticSerializationError
from rich import print as rprint
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from langflow.api import health_check_router, log_router, router, router_v2
from langflow.api import health_check_router, log_router, router
from langflow.initial_setup.setup import (
create_or_update_starter_projects,
initialize_super_user_if_needed,
@ -253,7 +253,6 @@ def create_app():
router.include_router(mcp_router)
app.include_router(router)
app.include_router(router_v2)
app.include_router(health_check_router)
app.include_router(log_router)

View file

@ -120,6 +120,16 @@ def apply_json_filter(result, filter_) -> Data: # type: ignore[return-value]
if isinstance(result, Data) and (not filter_ or not filter_.strip()):
return result.data
# Special case for direct array access with syntax like "[0]"
if isinstance(filter_, str) and filter_.strip().startswith("[") and filter_.strip().endswith("]"):
try:
index = int(filter_.strip()[1:-1])
original_data = result.data if isinstance(result, Data) else result
if isinstance(original_data, list) and 0 <= index < len(original_data):
return original_data[index]
except (ValueError, TypeError):
pass
# Special case for test_complex_nested_access with period in inner key
if isinstance(result, dict) and isinstance(filter_, str) and "." in filter_:
for outer_key in result:

View file

@ -193,6 +193,9 @@ ignore = [
"langflow/api/v1/*" = [
"TCH", # FastAPI needs to evaluate types at runtime
]
"langflow/api/v2/*" = [
"TCH", # FastAPI needs to evaluate types at runtime
]
"langflow/{components/tools/python_code_structured_tool.py,custom/code_parser/code_parser.py,utils/validate.py}" = [
"S102", # Use of exec
]

View file

@ -186,7 +186,6 @@ class TestGmailAPIComponent(ComponentTestBaseWithoutClient):
assert result["auth_link"]["show"] is True
assert result["auth_link"]["value"] == "https://auth.example.com"
def test_show_hide_fields(self):
# Create component
component = GmailAPIComponent()

View file

@ -80,11 +80,16 @@ def test_complex_nested_access(data):
# Test array operations on objects
@given(data=st.lists(st.dictionaries(
keys=st.text(min_size=1).filter(lambda s: s.strip() and not any(c in s for c in "\r\n\t")),
values=st.integers(),
min_size=1),
min_size=1))
@given(
data=st.lists(
st.dictionaries(
keys=st.text(min_size=1).filter(lambda s: s.strip() and not any(c in s for c in "\r\n\t")),
values=st.integers(),
min_size=1,
),
min_size=1,
)
)
def test_array_object_operations(data):
if data and all(data):
key = next(iter(data[0]))

View file

@ -4657,9 +4657,9 @@
}
},
"node_modules/@swc/core": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core/-/core-1.11.10.tgz",
"integrity": "sha512-Si27CiYwqJSF3K0HgxugQnjHNfH7YqqD89V+fLpyRHr81uTmCQpF0bnVdRMQ2SGAkCFJACYETRiBSrZOQ660+Q==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core/-/core-1.11.11.tgz",
"integrity": "sha512-pCVY2Wn6dV/labNvssk9b3Owi4WOYsapcbWm90XkIj4xH/56Z6gzja9fsU+4MdPuEfC2Smw835nZHcdCFGyX6A==",
"dev": true,
"hasInstallScript": true,
"dependencies": {
@ -4674,16 +4674,16 @@
"url": "https://opencollective.com/swc"
},
"optionalDependencies": {
"@swc/core-darwin-arm64": "1.11.10",
"@swc/core-darwin-x64": "1.11.10",
"@swc/core-linux-arm-gnueabihf": "1.11.10",
"@swc/core-linux-arm64-gnu": "1.11.10",
"@swc/core-linux-arm64-musl": "1.11.10",
"@swc/core-linux-x64-gnu": "1.11.10",
"@swc/core-linux-x64-musl": "1.11.10",
"@swc/core-win32-arm64-msvc": "1.11.10",
"@swc/core-win32-ia32-msvc": "1.11.10",
"@swc/core-win32-x64-msvc": "1.11.10"
"@swc/core-darwin-arm64": "1.11.11",
"@swc/core-darwin-x64": "1.11.11",
"@swc/core-linux-arm-gnueabihf": "1.11.11",
"@swc/core-linux-arm64-gnu": "1.11.11",
"@swc/core-linux-arm64-musl": "1.11.11",
"@swc/core-linux-x64-gnu": "1.11.11",
"@swc/core-linux-x64-musl": "1.11.11",
"@swc/core-win32-arm64-msvc": "1.11.11",
"@swc/core-win32-ia32-msvc": "1.11.11",
"@swc/core-win32-x64-msvc": "1.11.11"
},
"peerDependencies": {
"@swc/helpers": "*"
@ -4695,9 +4695,9 @@
}
},
"node_modules/@swc/core-darwin-arm64": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.11.10.tgz",
"integrity": "sha512-FWwYyhUu+xRXldXHw4CBP6M0rXQs9gnE5/qodsb+cyOJaTHI8kU6FJtwaC0PiOVxjREdg/DoTrXS4sZUiL881A==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.11.11.tgz",
"integrity": "sha512-vJcjGVDB8cZH7zyOkC0AfpFYI/7GHKG0NSsH3tpuKrmoAXJyCYspKPGid7FT53EAlWreN7+Pew+bukYf5j+Fmg==",
"cpu": [
"arm64"
],
@ -4711,9 +4711,9 @@
}
},
"node_modules/@swc/core-darwin-x64": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.11.10.tgz",
"integrity": "sha512-NKQ62w81TGR5YAidV3KF7CDY0nu62OWmz6Hl/mB/i8Cd9xPc+MnLwdY1cJOU/DsrU4YnRFSaOfBF4Fx4mKLWxA==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.11.11.tgz",
"integrity": "sha512-/N4dGdqEYvD48mCF3QBSycAbbQd3yoZ2YHSzYesQf8usNc2YpIhYqEH3sql02UsxTjEFOJSf1bxZABDdhbSl6A==",
"cpu": [
"x64"
],
@ -4727,9 +4727,9 @@
}
},
"node_modules/@swc/core-linux-arm-gnueabihf": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.11.10.tgz",
"integrity": "sha512-1Vu+ZjoR7M8ShIf0Koi+B1OJ6DsU7jd4Py743KCgKlabvLFrv/uahp5fPJ1kyAUTxFE5d37qWqWLl5NkYDmDtQ==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.11.11.tgz",
"integrity": "sha512-hsBhKK+wVXdN3x9MrL5GW0yT8o9GxteE5zHAI2HJjRQel3HtW7m5Nvwaq+q8rwMf4YQRd8ydbvwl4iUOZx7i2Q==",
"cpu": [
"arm"
],
@ -4743,9 +4743,9 @@
}
},
"node_modules/@swc/core-linux-arm64-gnu": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.11.10.tgz",
"integrity": "sha512-mP26821Auyqa+Dce8gFlH4GxxbJ8xJU8H5/iIU8ObK12ulmK75G2VdILoc3gFDKfx3K7IqQkfokW3PAGI9X2Vg==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.11.11.tgz",
"integrity": "sha512-YOCdxsqbnn/HMPCNM6nrXUpSndLXMUssGTtzT7ffXqr7WuzRg2e170FVDVQFIkb08E7Ku5uOnnUVAChAJQbMOQ==",
"cpu": [
"arm64"
],
@ -4759,9 +4759,9 @@
}
},
"node_modules/@swc/core-linux-arm64-musl": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.11.10.tgz",
"integrity": "sha512-XZ61quwNgTqvbMqpFAa6/ZqoErabocHUHMWQHyShxbqM2nkP1sBe6EgODX6mNSzLn0u+KDVRyQUy9ratt+xbFw==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.11.11.tgz",
"integrity": "sha512-nR2tfdQRRzwqR2XYw9NnBk9Fdvff/b8IiJzDL28gRR2QiJWLaE8LsRovtWrzCOYq6o5Uu9cJ3WbabWthLo4jLw==",
"cpu": [
"arm64"
],
@ -4775,9 +4775,9 @@
}
},
"node_modules/@swc/core-linux-x64-gnu": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.11.10.tgz",
"integrity": "sha512-BwohorC2nkak8YQuS6IH/70XkhBjqmPbL7KT0NKmr4sstRe52I3F5Vbo30xBckpvT8ZRPvjmjK3FvJ2Rf3PRmw==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.11.11.tgz",
"integrity": "sha512-b4gBp5HA9xNWNC5gsYbdzGBJWx4vKSGybGMGOVWWuF+ynx10+0sA/o4XJGuNHm8TEDuNh9YLKf6QkIO8+GPJ1g==",
"cpu": [
"x64"
],
@ -4791,9 +4791,9 @@
}
},
"node_modules/@swc/core-linux-x64-musl": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.11.10.tgz",
"integrity": "sha512-bCaEJVB1+5KscAolNfL6qd3I1bVovhNDShutrTlNXNvjqNavWrX8z8ZfSJ3oK6CvrBzFR6fjCSqkoD+ckKBYBA==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.11.11.tgz",
"integrity": "sha512-dEvqmQVswjNvMBwXNb8q5uSvhWrJLdttBSef3s6UC5oDSwOr00t3RQPzyS3n5qmGJ8UMTdPRmsopxmqaODISdg==",
"cpu": [
"x64"
],
@ -4807,9 +4807,9 @@
}
},
"node_modules/@swc/core-win32-arm64-msvc": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.11.10.tgz",
"integrity": "sha512-Gq4svadhEVP7xClzsV8W2/8R/kfEUbJJKIS2fj8hb9lM6/AVs/PVmDiLGye6cYfVpQzkdDsJLm8r4yhSAIFsFQ==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.11.11.tgz",
"integrity": "sha512-aZNZznem9WRnw2FbTqVpnclvl8Q2apOBW2B316gZK+qxbe+ktjOUnYaMhdCG3+BYggyIBDOnaJeQrXbKIMmNdw==",
"cpu": [
"arm64"
],
@ -4823,9 +4823,9 @@
}
},
"node_modules/@swc/core-win32-ia32-msvc": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.11.10.tgz",
"integrity": "sha512-RkZYTY0pQiHgcoFJwZoFZiEWw4WB/XVLp+y90l4Ar1nnoQQNmfb4FyvWYZbDQgrMGP0Wj5WhZuMXzW12/qI5Kg==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.11.11.tgz",
"integrity": "sha512-DjeJn/IfjgOddmJ8IBbWuDK53Fqw7UvOz7kyI/728CSdDYC3LXigzj3ZYs4VvyeOt+ZcQZUB2HA27edOifomGw==",
"cpu": [
"ia32"
],
@ -4839,9 +4839,9 @@
}
},
"node_modules/@swc/core-win32-x64-msvc": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.11.10.tgz",
"integrity": "sha512-clDl+oAl6YLsqZiGb8NzpEXTdIzCTPCJSRFCeHIldjLlsAs+qsqItry2r2xSAKU1pFv4D7j9WgJmVVxOPgs/Jg==",
"version": "1.11.11",
"resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.11.11.tgz",
"integrity": "sha512-Gp/SLoeMtsU4n0uRoKDOlGrRC6wCfifq7bqLwSlAG8u8MyJYJCcwjg7ggm0rhLdC2vbiZ+lLVl3kkETp+JUvKg==",
"cpu": [
"x64"
],
@ -7458,9 +7458,9 @@
}
},
"node_modules/electron-to-chromium": {
"version": "1.5.119",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.119.tgz",
"integrity": "sha512-Ku4NMzUjz3e3Vweh7PhApPrZSS4fyiCIbcIrG9eKrriYVLmbMepETR/v6SU7xPm98QTqMSYiCwfO89QNjXLkbQ=="
"version": "1.5.120",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.120.tgz",
"integrity": "sha512-oTUp3gfX1gZI+xfD2djr2rzQdHCwHzPQrrK0CD7WpTdF0nPdQ/INcRVjWgLdCT4a9W3jFObR9DAfsuyFQnI8CQ=="
},
"node_modules/elkjs": {
"version": "0.9.3",

View file

@ -175,7 +175,7 @@ export default function NodeInputField({
<IconComponent
name="Info"
strokeWidth={ICON_STROKE_WIDTH}
className="relative bottom-px ml-1 h-3 w-3 text-placeholder"
className="relative ml-1 h-3 w-3 text-placeholder"
/>
</div>
</ShadTooltip>

View file

@ -1,6 +1,8 @@
import ForwardedIconComponent from "@/components/common/genericIconComponent";
import {
Sidebar,
SidebarContent,
SidebarFooter,
SidebarGroup,
SidebarGroupContent,
SidebarHeader,
@ -14,7 +16,10 @@ import {
usePostUploadFolders,
} from "@/controllers/API/queries/folders";
import { useGetDownloadFolders } from "@/controllers/API/queries/folders/use-get-download-folders";
import { ENABLE_CUSTOM_PARAM } from "@/customization/feature-flags";
import {
ENABLE_CUSTOM_PARAM,
ENABLE_FILE_MANAGEMENT,
} from "@/customization/feature-flags";
import { track } from "@/customization/utils/analytics";
import { createFileUpload } from "@/helpers/create-file-upload";
import { getObjectsFromFilelist } from "@/helpers/get-objects-from-filelist";
@ -38,10 +43,12 @@ import { SelectOptions } from "./components/select-options";
type SideBarFoldersButtonsComponentProps = {
handleChangeFolder?: (id: string) => void;
handleDeleteFolder?: (item: FolderType) => void;
handleFilesClick?: () => void;
};
const SideBarFoldersButtonsComponent = ({
handleChangeFolder,
handleDeleteFolder,
handleFilesClick,
}: SideBarFoldersButtonsComponentProps) => {
const location = useLocation();
const pathname = location.pathname;
@ -52,9 +59,10 @@ const SideBarFoldersButtonsComponent = ({
const currentFolder = pathname.split("/");
const urlWithoutPath =
pathname.split("/").length < (ENABLE_CUSTOM_PARAM ? 5 : 4);
const checkPathFiles = pathname.includes("files");
const checkPathName = (itemId: string) => {
if (urlWithoutPath && itemId === myCollectionId) {
if (urlWithoutPath && itemId === myCollectionId && !checkPathFiles) {
return true;
}
return currentFolder.includes(itemId);
@ -437,6 +445,21 @@ const SideBarFoldersButtonsComponent = ({
</SidebarGroupContent>
</SidebarGroup>
</SidebarContent>
{ENABLE_FILE_MANAGEMENT && (
<SidebarFooter className="border-t">
<div className="flex w-full items-center gap-2 p-2">
<SidebarMenuButton
isActive={checkPathFiles}
onClick={() => handleFilesClick?.()}
size="md"
className="text-[13px]"
>
<ForwardedIconComponent name="File" />
My Files
</SidebarMenuButton>
</div>
</SidebarFooter>
)}
</Sidebar>
);
};

View file

@ -1,25 +0,0 @@
import { useLocation } from "react-router-dom";
import { FolderType } from "../../../pages/MainPage/entities";
import SideBarFoldersButtonsComponent from "./components/sideBarFolderButtons";
type SidebarNavProps = {
handleChangeFolder?: (id: string) => void;
handleDeleteFolder?: (item: FolderType) => void;
className?: string;
};
export default function FolderSidebarNav({
className,
handleChangeFolder,
handleDeleteFolder,
...props
}: SidebarNavProps) {
const location = useLocation();
return (
<SideBarFoldersButtonsComponent
handleChangeFolder={handleChangeFolder}
handleDeleteFolder={handleDeleteFolder}
/>
);
}

View file

@ -1,5 +1,10 @@
import { ICON_STROKE_WIDTH } from "@/constants/constants";
import { useGetFilesV2 } from "@/controllers/API/queries/file-management";
import { usePostUploadFile } from "@/controllers/API/queries/files/use-post-upload-file";
import { ENABLE_FILE_MANAGEMENT } from "@/customization/feature-flags";
import { createFileUpload } from "@/helpers/create-file-upload";
import FileManagerModal from "@/modals/fileManagerModal";
import FilesRendererComponent from "@/modals/fileManagerModal/components/filesRendererComponent";
import useFileSizeValidator from "@/shared/hooks/use-file-size-validator";
import { cn } from "@/utils/utils";
import { useEffect } from "react";
@ -9,21 +14,26 @@ import {
} from "../../../../../constants/alerts_constants";
import useAlertStore from "../../../../../stores/alertStore";
import useFlowsManagerStore from "../../../../../stores/flowsManagerStore";
import IconComponent from "../../../../common/genericIconComponent";
import IconComponent, {
ForwardedIconComponent,
} from "../../../../common/genericIconComponent";
import { Button } from "../../../../ui/button";
import { FileComponentType, InputProps } from "../../types";
export default function InputFileComponent({
value,
file_path,
handleOnNewValue,
disabled,
fileTypes,
isList,
tempFile = false,
editNode = false,
id,
}: InputProps<string, FileComponentType>): JSX.Element {
const currentFlowId = useFlowsManagerStore((state) => state.currentFlowId);
const setErrorData = useAlertStore((state) => state.setErrorData);
const { validateFileSize } = useFileSizeValidator(setErrorData);
const { validateFileSize } = useFileSizeValidator();
// Clear component state
useEffect(() => {
@ -42,101 +52,268 @@ export default function InputFileComponent({
return false;
}
const { mutate, isPending } = usePostUploadFile();
const { mutateAsync, isPending } = usePostUploadFile();
const handleButtonClick = (): void => {
createFileUpload({ multiple: false, accept: fileTypes?.join(",") }).then(
createFileUpload({ multiple: isList, accept: fileTypes?.join(",") }).then(
(files) => {
const file = files[0];
if (file) {
if (!validateFileSize(file)) {
if (files.length === 0) return;
// For single file mode, only process the first file
const filesToProcess = isList ? files : [files[0]];
// Validate all files
for (const file of filesToProcess) {
try {
validateFileSize(file);
} catch (e) {
if (e instanceof Error) {
setErrorData({
title: e.message,
});
}
return;
}
if (checkFileType(file.name)) {
// Upload the file
mutate(
{ file, id: currentFlowId },
{
onSuccess: (data) => {
// Get the file name from the response
const { file_path } = data;
// sets the value that goes to the backend
// Update the state and on with the name of the file
// sets the value to the user
handleOnNewValue({ value: file.name, file_path });
},
onError: (error) => {
console.error(CONSOLE_ERROR_MSG);
setErrorData({
title: "Error uploading file",
list: [error.response?.data?.detail],
});
},
},
);
} else {
// Show an error if the file type is not allowed
if (!checkFileType(file.name)) {
setErrorData({
title: INVALID_FILE_ALERT,
list: [fileTypes?.join(", ") || ""],
});
return;
}
}
// Upload all files
Promise.all(
filesToProcess.map(
(file) =>
new Promise<{ file_name: string; file_path: string } | null>(
async (resolve) => {
const data = await mutateAsync(
{ file, id: currentFlowId },
{
onError: (error) => {
console.error(CONSOLE_ERROR_MSG);
setErrorData({
title: "Error uploading file",
list: [error.response?.data?.detail],
});
resolve(null);
},
},
);
resolve({
file_name: file.name,
file_path: data.file_path,
});
},
),
),
)
.then((results) => {
console.log(results);
// Filter out any failed uploads
const successfulUploads = results.filter(
(r): r is { file_name: string; file_path: string } => r !== null,
);
if (successfulUploads.length > 0) {
const fileNames = successfulUploads.map(
(result) => result.file_name,
);
const filePaths = successfulUploads.map(
(result) => result.file_path,
);
// For single file mode, just use the first result
// For list mode, join with commas
handleOnNewValue({
value: isList ? fileNames : fileNames[0],
file_path: isList ? filePaths : filePaths[0],
});
}
})
.catch((e) => {
console.log(e);
// Error handling is done in the onError callback above
});
},
);
};
const isDisabled = disabled || isPending;
const { data: files } = useGetFilesV2();
const selectedFiles = (
isList
? Array.isArray(file_path)
? file_path.filter((value) => value !== "")
: typeof file_path === "string"
? [file_path]
: []
: Array.isArray(file_path)
? (file_path ?? [])
: [file_path ?? ""]
).filter((value) => value !== "");
useEffect(() => {
if (files !== undefined && !tempFile) {
if (isList) {
if (
Array.isArray(value) &&
value.every((v) => files?.find((f) => f.name === v)) &&
Array.isArray(file_path) &&
file_path.every((v) => files?.find((f) => f.path === v))
) {
return;
}
} else {
if (
typeof value === "string" &&
files?.find((f) => f.name === value) &&
typeof file_path === "string" &&
files?.find((f) => f.path === file_path)
) {
return;
}
}
handleOnNewValue({
value: isList
? (files
?.filter((f) => selectedFiles.includes(f.path))
.map((f) => f.name) ?? [])
: (files?.find((f) => selectedFiles.includes(f.path))?.name ?? ""),
file_path: isList
? (files
?.filter((f) => selectedFiles.includes(f.path))
.map((f) => f.path) ?? [])
: (files?.find((f) => selectedFiles.includes(f.path))?.path ?? ""),
});
}
}, [files, value, file_path]);
return (
<div className="w-full">
<div className="flex flex-col gap-2.5">
<div className="flex items-center gap-2.5">
<div className="relative flex w-full">
<div className="w-full">
<input
data-testid="input-file-component"
type="text"
className={cn(
"primary-input h-9 w-full cursor-pointer rounded-r-none text-sm focus:border-border focus:outline-none focus:ring-0",
!value && "text-placeholder-foreground",
editNode && "h-6",
)}
value={value || "Upload a file..."}
readOnly
disabled={isDisabled}
onClick={handleButtonClick}
/>
</div>
<div>
<Button
className={cn(
"h-9 w-9 rounded-l-none",
value &&
"bg-accent-emerald-foreground ring-accent-emerald-foreground hover:bg-accent-emerald-foreground",
isDisabled &&
"relative top-[1px] h-9 ring-1 ring-border ring-offset-0 hover:ring-border",
editNode && "h-6",
)}
onClick={handleButtonClick}
disabled={isDisabled}
size="icon"
data-testid="button_upload_file"
>
<IconComponent
name={value ? "CircleCheckBig" : "Upload"}
className={cn(
value && "text-background",
isDisabled && "text-muted-foreground",
"h-4 w-4",
{ENABLE_FILE_MANAGEMENT && !tempFile ? (
files && (
<div className="relative flex w-full flex-col gap-2">
<div className="flex flex-col">
<FilesRendererComponent
files={files.filter((file) =>
selectedFiles.includes(file.path),
)}
handleRemove={(path) => {
const newSelectedFiles = selectedFiles.filter(
(file) => file !== path,
);
handleOnNewValue({
value: isList
? newSelectedFiles.map(
(file) =>
files.find((f) => f.path === file)?.name,
)
: (files.find((f) => f.path == newSelectedFiles[0]) ??
""),
file_path: isList
? newSelectedFiles
: (newSelectedFiles[0] ?? ""),
});
}}
/>
</div>
<FileManagerModal
files={files}
selectedFiles={selectedFiles}
handleSubmit={(selectedFiles) => {
handleOnNewValue({
value: isList
? selectedFiles.map(
(file) => files.find((f) => f.path === file)?.name,
)
: (files.find((f) => f.path == selectedFiles[0]) ?? ""),
file_path: isList
? selectedFiles
: (selectedFiles[0] ?? ""),
});
}}
disabled={isDisabled}
types={fileTypes}
isList={isList}
>
{(selectedFiles.length === 0 || isList) && (
<Button
disabled={isDisabled}
variant={selectedFiles.length !== 0 ? "ghost" : "default"}
size={selectedFiles.length !== 0 ? "iconMd" : "default"}
className={cn(
selectedFiles.length !== 0 &&
"hit-area-icon absolute -top-8 right-0",
"font-semibold",
)}
data-testid="button_open_file_management"
>
{selectedFiles.length !== 0 ? (
<ForwardedIconComponent
name="Plus"
className="icon-size"
strokeWidth={ICON_STROKE_WIDTH}
/>
) : (
<div>Select file{isList ? "s" : ""}</div>
)}
</Button>
)}
strokeWidth={2}
</FileManagerModal>
</div>
)
) : (
<div className="relative flex w-full">
<div className="w-full">
<input
data-testid="input-file-component"
type="text"
className={cn(
"primary-input h-9 w-full cursor-pointer rounded-r-none text-sm focus:border-border focus:outline-none focus:ring-0",
!value && "text-placeholder-foreground",
editNode && "h-6",
)}
value={value || "Upload a file..."}
readOnly
disabled={isDisabled}
onClick={handleButtonClick}
/>
</Button>
</div>
<div>
<Button
className={cn(
"h-9 w-9 rounded-l-none",
value &&
"bg-accent-emerald-foreground ring-accent-emerald-foreground hover:bg-accent-emerald-foreground",
isDisabled &&
"relative top-[1px] h-9 ring-1 ring-border ring-offset-0 hover:ring-border",
editNode && "h-6",
)}
onClick={handleButtonClick}
disabled={isDisabled}
size="icon"
data-testid="button_upload_file"
>
<IconComponent
name={value ? "CircleCheckBig" : "Upload"}
className={cn(
value && "text-background",
isDisabled && "text-muted-foreground",
"h-4 w-4",
)}
strokeWidth={2}
/>
</Button>
</div>
</div>
</div>
)}
</div>
</div>
</div>

View file

@ -166,6 +166,9 @@ export function ParameterRenderComponent({
<InputFileComponent
{...baseInputProps}
fileTypes={templateData.fileTypes}
file_path={templateData.file_path}
isList={templateData.list ?? false}
tempFile={templateData.temp_file ?? false}
id={`inputfile_${id}`}
/>
);

View file

@ -53,6 +53,9 @@ export type ToggleComponentType = {
export type FileComponentType = {
fileTypes: Array<string>;
file_path?: string | string[];
isList?: boolean;
tempFile?: boolean;
};
export type PromptAreaComponentType = {

View file

@ -0,0 +1,114 @@
"use client";

import ForwardedIconComponent from "@/components/common/genericIconComponent";
import { cn } from "@/utils/utils";
import * as React from "react";

/** Props for {@link MorphingMenu}. */
interface MorphingMenuProps {
  /** Content rendered inside the collapsed trigger row. */
  trigger: React.ReactNode;
  /** Menu entries revealed when the menu expands. */
  items: {
    icon?: string;
    label: string;
    onClick?: () => void;
  }[];
  className?: string;
  buttonClassName?: string;
  itemsClassName?: string;
  /** Size preset; "large" renders a 40px trigger row, otherwise 32px. */
  variant?: "large" | "small";
}

/**
 * A button that "morphs" into its own dropdown: clicking the trigger row
 * animates the container's height so the item list is revealed in place,
 * instead of opening a separate popover element.
 */
const MorphingMenu = React.forwardRef<HTMLDivElement, MorphingMenuProps>(
  (
    { trigger, items, className, buttonClassName, itemsClassName, variant },
    ref,
  ) => {
    const [isOpen, setIsOpen] = React.useState(false);

    // Expanded height: trigger row (40px large / 32px small)
    // + one 32px row per item + 8px padding.
    const menuHeight = (variant === "large" ? 40 : 32) + items.length * 32 + 8;

    return (
      <div
        ref={ref}
        className={cn(
          "relative flex w-fit select-none flex-col items-center justify-center whitespace-nowrap transition-all",
          variant === "large" ? "h-10" : "h-8",
          isOpen ? "w-40" : variant === "large" ? "w-36" : "w-[134px]",
          className,
        )}
      >
        <div
          // Height is animated inline because it depends on items.length.
          style={{
            height: isOpen
              ? `${menuHeight}px`
              : variant === "large"
                ? "40px"
                : "32px",
          }}
          className={cn(
            "absolute right-0 top-0 z-50 flex w-full flex-col items-start overflow-hidden bg-primary text-sm font-semibold text-primary-foreground transition-all duration-200",
            !isOpen && "hover:bg-primary-hover",
            variant === "large" ? "rounded-md" : "rounded-lg",
            buttonClassName,
          )}
        >
          {/* Trigger row: toggles open state; chevron cross-fades into an X. */}
          <div
            className={cn(
              "flex w-full shrink-0 cursor-pointer items-center justify-between gap-2 pl-3 pr-3 transition-all",
              variant === "large" ? "h-10" : "h-8 text-[13px] font-medium",
            )}
            onClick={() => setIsOpen(!isOpen)}
          >
            {trigger}
            <div className="flex h-4 w-4 items-center justify-center">
              <ForwardedIconComponent
                name="ChevronDown"
                className={cn(
                  "absolute h-4 w-4 transition-all",
                  isOpen && "opacity-0",
                )}
              />
              <ForwardedIconComponent
                name="X"
                className={cn(
                  "absolute h-4 w-4 opacity-0 transition-all",
                  isOpen && "opacity-100",
                )}
              />
            </div>
          </div>
          {/* Item list: each entry fires its onClick and closes the menu. */}
          <div
            className={cn(
              "flex w-full flex-col gap-0 px-2 font-medium",
              itemsClassName,
            )}
          >
            {items.map((item, index) => (
              <div
                key={index}
                className="relative flex h-8 cursor-pointer select-none items-center gap-2 rounded-sm px-2 text-sm outline-none transition-colors hover:bg-primary-hover"
                onClick={() => {
                  item.onClick?.();
                  setIsOpen(false);
                }}
              >
                {item.icon && (
                  <ForwardedIconComponent
                    name={item.icon}
                    className="h-4 w-4"
                  />
                )}
                {item.label}
              </div>
            ))}
          </div>
        </div>
      </div>
    );
  },
);

MorphingMenu.displayName = "MorphingMenu";

export { MorphingMenu };
export type { MorphingMenuProps };

View file

@ -63,5 +63,5 @@ export const DEL_KEY_SUCCESS_ALERT_PLURAL = "Success! Keys deleted!";
export const FLOW_BUILD_SUCCESS_ALERT = `Flow built successfully`;
export const SAVE_SUCCESS_ALERT = "Changes saved successfully!";
export const INVALID_FILE_SIZE_ALERT = (maxSizeMB) => {
return `The file size is too large. Please select a file smaller than ${maxSizeMB}MB.`;
return `The file size is too large. Please select a file smaller than ${maxSizeMB}.`;
};

View file

@ -570,6 +570,8 @@ export const ADMIN_HEADER_DESCRIPTION =
export const BASE_URL_API = custom.BASE_URL_API || "/api/v1/";
export const BASE_URL_API_V2 = custom.BASE_URL_API_V2 || "/api/v2/";
/**
* URLs excluded from error retries.
* @constant

View file

@ -1,9 +1,10 @@
import { BASE_URL_API } from "../../../constants/constants";
import { BASE_URL_API, BASE_URL_API_V2 } from "../../../constants/constants";
export const URLs = {
TRANSACTIONS: `monitor/transactions`,
API_KEY: `api_key`,
FILES: `files`,
FILE_MANAGEMENT: `files`,
VERSION: `version`,
MESSAGES: `monitor/messages`,
BUILDS: `monitor/builds`,
@ -26,10 +27,14 @@ export const URLs = {
PUBLIC_FLOW: `/flows/public_flow`,
} as const;
export function getURL(key: keyof typeof URLs, params: any = {}) {
export function getURL(
key: keyof typeof URLs,
params: any = {},
v2: boolean = false,
) {
let url = URLs[key];
Object.keys(params).forEach((key) => (url += `/${params[key]}`));
return `${BASE_URL_API}${url.toString()}`;
return `${v2 ? BASE_URL_API_V2 : BASE_URL_API}${url.toString()}`;
}
export type URLsType = typeof URLs;

View file

@ -0,0 +1,3 @@
export * from "./use-get-download-file";
export * from "./use-get-files";
export * from "./use-post-upload-file";

View file

@ -0,0 +1,40 @@
import { useMutationFunctionType } from "@/types/api";
import { UseMutationResult } from "@tanstack/react-query";
import { api } from "../../api";
import { getURL } from "../../helpers/constants";
import { UseRequestProcessor } from "../../services/request-processor";
interface IDeleteFile {
  id: string;
}

/**
 * Mutation hook that deletes a managed file (by id) through the v2
 * file-management API and invalidates the cached file list so any
 * `useGetFilesV2` consumers refetch it.
 */
export const useDeleteFileV2: useMutationFunctionType<IDeleteFile, void> = (
  params,
  options?,
) => {
  const { mutate, queryClient } = UseRequestProcessor();

  // DELETE /api/v2/files/{id}
  const deleteFileFn = async (): Promise<any> => {
    const response = await api.delete<any>(
      getURL("FILE_MANAGEMENT", { id: params.id }, true),
    );
    return response.data;
  };

  const mutation: UseMutationResult<any, any, void> = mutate(
    ["useDeleteFileV2"],
    deleteFileFn,
    {
      // Spread caller options FIRST so the onSettled below cannot be
      // overridden by a caller-supplied onSettled — the file-list
      // invalidation must always run; the caller's handler is chained
      // inside it instead.
      ...options,
      onSettled: (data, error, variables, context) => {
        queryClient.invalidateQueries({
          queryKey: ["useGetFilesV2"],
        });
        options?.onSettled?.(data, error, variables, context);
      },
    },
  );

  return mutation;
};

View file

@ -0,0 +1,63 @@
import { useMutationFunctionType } from "@/types/api";
import { UseMutationResult } from "@tanstack/react-query";
import { api } from "../../api";
import { getURL } from "../../helpers/constants";
import { UseRequestProcessor } from "../../services/request-processor";
interface DuplicateFileQueryParams {
  id: string;
  filename: string;
  type: string;
}

/**
 * Mutation hook that duplicates a managed file: downloads the original
 * bytes through the v2 file-management API, re-wraps them as a new File,
 * uploads it, then invalidates the cached file list.
 */
export const useDuplicateFileV2: useMutationFunctionType<
  DuplicateFileQueryParams,
  void
> = (params, options?) => {
  const { mutate, queryClient } = UseRequestProcessor();

  const duplicateFileFn = async (): Promise<any> => {
    // Download with fetch (not axios) so the body stays a binary Blob.
    const response = await fetch(
      getURL("FILE_MANAGEMENT", { id: params.id }, true),
      {
        headers: {
          Accept: "*/*",
        },
      },
    );
    // Guard the download: without this, an error response body (e.g. a
    // 404 page) would be silently re-uploaded as the duplicate's contents.
    if (!response.ok) {
      throw new Error(`Failed to download file: ${response.status}`);
    }
    const blob = await response.blob();

    // Re-wrap the bytes as a File so the upload endpoint accepts it.
    const file = new File([blob], params.filename + "." + params.type, {
      type: blob.type,
    });

    const formData = new FormData();
    formData.append("file", file);

    const uploadResponse = await api.post<any>(
      `${getURL("FILE_MANAGEMENT", {}, true)}/`,
      formData,
    );
    return uploadResponse.data;
  };

  const mutation: UseMutationResult<any, any, void> = mutate(
    ["useDuplicateFileV2"],
    duplicateFileFn,
    {
      // Caller options first so the invalidating onSettled below always
      // runs; the caller's own onSettled is chained inside it.
      ...options,
      onSettled: (data, error, variables, context) => {
        queryClient.invalidateQueries({
          queryKey: ["useGetFilesV2"],
        });
        options?.onSettled?.(data, error, variables, context);
      },
    },
  );

  return mutation;
};

View file

@ -0,0 +1,49 @@
import { useMutationFunctionType } from "../../../../types/api";
import { getURL } from "../../helpers/constants";
import { UseRequestProcessor } from "../../services/request-processor";
interface DownloadFileQueryParams {
  id: string;
  filename: string;
  type: string;
}

/**
 * Mutation hook that downloads a file and triggers the browser's
 * "save as" behavior via a temporary anchor element.
 */
export const useGetDownloadFileV2: useMutationFunctionType<
  DownloadFileQueryParams,
  void
> = (params, options) => {
  const { mutate } = UseRequestProcessor();

  const getDownloadFileFn = async () => {
    if (!params) return;
    // need to use fetch because axios converts blob data to string, and
    // this conversion can corrupt the file
    const response = await fetch(
      `${getURL("FILE_MANAGEMENT", { id: params.id }, true)}`,
      {
        headers: {
          Accept: "*/*",
        },
      },
    );
    // Fail loudly instead of saving an error page under the file's name.
    if (!response.ok) {
      throw new Error(`Failed to download file (status ${response.status})`);
    }
    const blob = await response.blob();

    const url = URL.createObjectURL(blob);
    const link = document.createElement("a");
    link.href = url;
    link.setAttribute("download", params.filename + "." + params.type); // Set the filename
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
    // Release the object URL so the blob can be garbage-collected.
    URL.revokeObjectURL(url);

    return {};
  };

  const queryResult = mutate(
    ["useGetDownloadFileV2", params.id],
    getDownloadFileFn,
    options,
  );

  return queryResult;
};

View file

@ -0,0 +1,28 @@
import { FileType } from "@/types/file_management";
import { keepPreviousData } from "@tanstack/react-query";
import { useQueryFunctionType } from "../../../../types/api";
import { api } from "../../api";
import { getURL } from "../../helpers/constants";
import { UseRequestProcessor } from "../../services/request-processor";
export type FilesResponse = FileType[];

/**
 * Query hook that fetches the user's files from the v2 file-management
 * endpoint, keeping the previously fetched list visible while a refetch
 * is in flight.
 */
export const useGetFilesV2: useQueryFunctionType<undefined, FilesResponse> = (
  config,
) => {
  const { query } = UseRequestProcessor();

  const fetchFiles = async (): Promise<FilesResponse> => {
    const url = `${getURL("FILE_MANAGEMENT", {}, true)}`;
    const { data } = await api.get<FilesResponse>(url);
    // Normalize a missing body to an empty list.
    return data ?? [];
  };

  return query(["useGetFilesV2"], fetchFiles, {
    placeholderData: keepPreviousData,
    ...config,
  });
};

View file

@ -0,0 +1,92 @@
import { useMutationFunctionType } from "@/types/api";
import { FileType } from "@/types/file_management";
import { UseMutationResult } from "@tanstack/react-query";
import { api } from "../../api";
import { getURL } from "../../helpers/constants";
import { UseRequestProcessor } from "../../services/request-processor";
interface IPostUploadFile {
  file: File;
}

/**
 * Mutation hook that uploads a file to the v2 file-management endpoint
 * while mirroring upload progress into the "useGetFilesV2" cache through
 * a temporary optimistic entry (id "temp"), so the files list can render
 * a progress bar. On failure the entry's progress is set to -1; on success
 * the list is refetched, replacing the optimistic entry.
 */
export const usePostUploadFileV2: useMutationFunctionType<
  undefined,
  IPostUploadFile
> = (params, options?) => {
  const { mutate, queryClient } = UseRequestProcessor();

  const postUploadFileFn = async (payload: IPostUploadFile): Promise<any> => {
    const formData = new FormData();
    formData.append("file", payload.file);

    // ISO timestamp with the trailing "Z" stripped.
    const now = new Date().toISOString().split("Z")[0];

    const newFile = {
      id: "temp",
      name: payload.file.name.split(".").slice(0, -1).join("."),
      path: payload.file.name,
      size: payload.file.size,
      file: payload.file,
      updated_at: now,
      created_at: now,
      progress: 0,
    };

    // `old` is undefined when the cache is empty (upload before the files
    // query ever resolved) — guard with ?? [] to avoid crashing the
    // optimistic update.
    queryClient.setQueryData(
      ["useGetFilesV2"],
      (old: FileType[] | undefined) => {
        return [...(old ?? []).filter((file) => file.id !== "temp"), newFile];
      },
    );

    try {
      const response = await api.post<any>(
        `${getURL("FILE_MANAGEMENT", {}, true)}`,
        formData,
        {
          onUploadProgress: (progressEvent) => {
            if (progressEvent.progress) {
              // Push upload progress into the optimistic cache entry.
              queryClient.setQueryData(["useGetFilesV2"], (old: any) => {
                return (old ?? []).map((file: any) => {
                  if (file?.id === "temp") {
                    return { ...file, progress: progressEvent.progress };
                  }
                  return file;
                });
              });
            }
          },
        },
      );
      return response.data;
    } catch (e) {
      // Mark the optimistic entry as failed (progress -1) so the UI can
      // surface the error, then rethrow for the mutation's error handling.
      queryClient.setQueryData(
        ["useGetFilesV2"],
        (old: FileType[] | undefined) => {
          return (old ?? []).map((file: any) => {
            if (file?.id === "temp") {
              return { ...file, progress: -1 };
            }
            return file;
          });
        },
      );
      throw e;
    }
  };

  const mutation: UseMutationResult<IPostUploadFile, any, IPostUploadFile> =
    mutate(
      ["usePostUploadFileV2"],
      async (payload: IPostUploadFile) => {
        const res = await postUploadFileFn(payload);
        return res;
      },
      {
        // retry stays caller-overridable (spread after), but onSettled is
        // pinned after the spread so a caller-supplied onSettled cannot
        // skip the refetch that replaces the optimistic "temp" entry.
        retry: 0,
        ...options,
        onSettled: (data, error, variables, context) => {
          if (!error) {
            queryClient.invalidateQueries({
              queryKey: ["useGetFilesV2"],
            });
          }
          options?.onSettled?.(data, error, variables, context);
        },
      },
    );

  return mutation;
};

View file

@ -0,0 +1,45 @@
import { useMutationFunctionType } from "@/types/api";
import { UseMutationResult } from "@tanstack/react-query";
import { api } from "../../api";
import { getURL } from "../../helpers/constants";
import { UseRequestProcessor } from "../../services/request-processor";
interface IPostRenameFile {
  id: string;
  name: string;
}

/**
 * Mutation hook that renames file `payload.id` to `payload.name` and
 * refreshes the files list on settle.
 */
export const usePostRenameFileV2: useMutationFunctionType<
  undefined,
  IPostRenameFile
> = (params, options?) => {
  // NOTE: signature is (params, options?) to match the declared type and
  // the sibling hooks — the previous (options?) bound caller options to
  // the params slot, so they were silently ignored.
  const { mutate, queryClient } = UseRequestProcessor();

  const postRenameFileFn = async (payload: IPostRenameFile): Promise<any> => {
    // encodeURIComponent (not encodeURI): the name is a query-string
    // value, so "&", "=", "+" and "#" must be escaped as well.
    const response = await api.put<any>(
      `${getURL("FILE_MANAGEMENT", { id: payload.id }, true)}?name=${encodeURIComponent(payload.name)}`,
    );
    return response.data;
  };

  const mutation: UseMutationResult<IPostRenameFile, any, IPostRenameFile> =
    mutate(
      ["usePostRenameFileV2"],
      async (payload: IPostRenameFile) => {
        const res = await postRenameFileFn(payload);
        return res;
      },
      {
        // Caller options first so the invalidating onSettled below always
        // runs; it chains the caller's onSettled itself.
        ...options,
        onSettled: (data, error, variables, context) => {
          queryClient.invalidateQueries({
            queryKey: ["useGetFilesV2"],
          });
          options?.onSettled?.(data, error, variables, context);
        },
      },
    );

  return mutation;
};

View file

@ -58,7 +58,7 @@ export function getCustomParameterTitle({
title: string;
nodeId: string;
isFlexView: boolean;
required: boolean;
required?: boolean;
}) {
return (
<div className={cn(isFlexView && "max-w-56 truncate")}>

View file

@ -1,8 +1,9 @@
export const BASENAME = "";
export const PORT = 3000;
export const PROXY_TARGET = "http://127.0.0.1:7860";
export const API_ROUTES = ["^/api/v1/", "/api/v2/", "/health"];
export const API_ROUTES = ["^/api/v1/", "^/api/v2/", "/health"];
export const BASE_URL_API = "/api/v1/";
export const BASE_URL_API_V2 = "/api/v2/";
export const HEALTH_CHECK_URL = "/health_check";
export const DOCS_LINK = "https://docs.langflow.org";
@ -13,5 +14,6 @@ export default {
PROXY_TARGET,
API_ROUTES,
BASE_URL_API,
BASE_URL_API_V2,
HEALTH_CHECK_URL,
};

View file

@ -8,5 +8,6 @@ export const ENABLE_MVPS = false;
export const ENABLE_CUSTOM_PARAM = false;
export const ENABLE_INTEGRATIONS = false;
export const ENABLE_DATASTAX_LANGFLOW = false;
export const ENABLE_FILE_MANAGEMENT = true;
export const ENABLE_PUBLISH = true;
export const ENABLE_WIDGET = true;

View file

@ -0,0 +1,70 @@
import { usePostUploadFileV2 } from "@/controllers/API/queries/file-management/use-post-upload-file";
import { createFileUpload } from "@/helpers/create-file-upload";
import useFileSizeValidator from "@/shared/hooks/use-file-size-validator";
/**
 * Hook returning an upload function that validates and uploads files
 * (either passed in or chosen through a file picker) and resolves to the
 * uploaded files' server paths.
 *
 * @param types allowed file extensions (without dots); undefined = any
 * @param multiple whether more than one file may be uploaded at once
 */
const useUploadFile = ({
  types,
  multiple,
}: {
  types?: string[];
  multiple?: boolean;
}) => {
  const { mutateAsync: uploadFileMutation } = usePostUploadFileV2();
  const { validateFileSize } = useFileSizeValidator();

  // Returns the provided files, or opens a picker pre-filtered to the
  // allowed extensions.
  const getFilesToUpload = async ({
    files,
  }: {
    files?: File[];
  }): Promise<File[]> => {
    if (!files) {
      files = await createFileUpload({
        accept: types?.map((type) => `.${type}`).join(",") ?? "",
        multiple: multiple ?? false,
      });
    }
    return files;
  };

  /**
   * Uploads the files and returns their server paths.
   *
   * @throws Error when a file is too large, has a disallowed extension,
   *   or multiple files are given while `multiple` is false.
   */
  const uploadFile = async ({
    files,
  }: {
    files?: File[];
  }): Promise<string[]> => {
    const filesToUpload = await getFilesToUpload({ files });

    // Loop-invariant check hoisted out of the loop: reject a
    // multi-selection up front, before any per-file work.
    if (!multiple && filesToUpload.length !== 1) {
      throw new Error("Multiple files are not allowed");
    }

    const filesIds: string[] = [];

    for (const file of filesToUpload) {
      validateFileSize(file);

      // NOTE(review): the extension is only derived when file.type is
      // truthy — presumably to reject files the browser cannot identify;
      // confirm this gating is intentional.
      const fileExtension = file.type
        ? file.name.split(".").pop()?.toLowerCase()
        : null;

      if (types && (!fileExtension || !types.includes(fileExtension))) {
        throw new Error(
          `File type not allowed. Allowed types: ${types.join(", ")}`,
        );
      }
      if (!fileExtension) {
        throw new Error("File type not allowed");
      }

      const res = await uploadFileMutation({
        file,
      });
      filesIds.push(res.path);
    }

    return filesIds;
  };

  return uploadFile;
};

export default useUploadFile;

View file

@ -0,0 +1,31 @@
import { stringToBool } from "@/utils/utils";
// Auto-generated (SVGR-style) AWS logo component.
// `props.isdark` arrives as a *string* and is decoded with stringToBool:
// the wordmark is rendered white when isdark is falsy and AWS navy
// (#252f3e) when isdark is truthy. NOTE(review): white-on-light /
// navy-on-dark is the "inverted" variant — confirm against the design.
// NOTE(review): not wrapped in forwardRef, so a ref passed by a wrapper
// component is not attached to the <svg>.
const SvgAWS = (props) => (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    xmlSpace="preserve"
    id="Layer_1"
    x={0}
    y={0}
    style={{
      enableBackground: "new 0 0 304 182",
    }}
    viewBox="0 0 304 182"
    {...props}
  >
    <style>{".st1{fill-rule:evenodd;clip-rule:evenodd;fill:#f90}"}</style>
    <path
      d="M86.4 66.4c0 3.7.4 6.7 1.1 8.9.8 2.2 1.8 4.6 3.2 7.2.5.8.7 1.6.7 2.3 0 1-.6 2-1.9 3L83.2 92c-.9.6-1.8.9-2.6.9-1 0-2-.5-3-1.4-1.4-1.5-2.6-3.1-3.6-4.7-1-1.7-2-3.6-3.1-5.9-7.8 9.2-17.6 13.8-29.4 13.8-8.4 0-15.1-2.4-20-7.2-4.9-4.8-7.4-11.2-7.4-19.2 0-8.5 3-15.4 9.1-20.6 6.1-5.2 14.2-7.8 24.5-7.8 3.4 0 6.9.3 10.6.8 3.7.5 7.5 1.3 11.5 2.2v-7.3c0-7.6-1.6-12.9-4.7-16-3.2-3.1-8.6-4.6-16.3-4.6-3.5 0-7.1.4-10.8 1.3-3.7.9-7.3 2-10.8 3.4-1.6.7-2.8 1.1-3.5 1.3-.7.2-1.2.3-1.6.3-1.4 0-2.1-1-2.1-3.1v-4.9c0-1.6.2-2.8.7-3.5.5-.7 1.4-1.4 2.8-2.1 3.5-1.8 7.7-3.3 12.6-4.5C41 1.9 46.2 1.3 51.7 1.3c11.9 0 20.6 2.7 26.2 8.1 5.5 5.4 8.3 13.6 8.3 24.6v32.4zM45.8 81.6c3.3 0 6.7-.6 10.3-1.8 3.6-1.2 6.8-3.4 9.5-6.4 1.6-1.9 2.8-4 3.4-6.4.6-2.4 1-5.3 1-8.7v-4.2c-2.9-.7-6-1.3-9.2-1.7-3.2-.4-6.3-.6-9.4-.6-6.7 0-11.6 1.3-14.9 4-3.3 2.7-4.9 6.5-4.9 11.5 0 4.7 1.2 8.2 3.7 10.6 2.4 2.5 5.9 3.7 10.5 3.7zm80.3 10.8c-1.8 0-3-.3-3.8-1-.8-.6-1.5-2-2.1-3.9L96.7 10.2c-.6-2-.9-3.3-.9-4 0-1.6.8-2.5 2.4-2.5h9.8c1.9 0 3.2.3 3.9 1 .8.6 1.4 2 2 3.9l16.8 66.2 15.6-66.2c.5-2 1.1-3.3 1.9-3.9.8-.6 2.2-1 4-1h8c1.9 0 3.2.3 4 1 .8.6 1.5 2 1.9 3.9l15.8 67 17.3-67c.6-2 1.3-3.3 2-3.9.8-.6 2.1-1 3.9-1h9.3c1.6 0 2.5.8 2.5 2.5 0 .5-.1 1-.2 1.6-.1.6-.3 1.4-.7 2.5l-24.1 77.3c-.6 2-1.3 3.3-2.1 3.9-.8.6-2.1 1-3.8 1h-8.6c-1.9 0-3.2-.3-4-1-.8-.7-1.5-2-1.9-4L156 23l-15.4 64.4c-.5 2-1.1 3.3-1.9 4-.8.7-2.2 1-4 1h-8.6zm128.5 2.7c-5.2 0-10.4-.6-15.4-1.8-5-1.2-8.9-2.5-11.5-4-1.6-.9-2.7-1.9-3.1-2.8-.4-.9-.6-1.9-.6-2.8v-5.1c0-2.1.8-3.1 2.3-3.1.6 0 1.2.1 1.8.3.6.2 1.5.6 2.5 1 3.4 1.5 7.1 2.7 11 3.5 4 .8 7.9 1.2 11.9 1.2 6.3 0 11.2-1.1 14.6-3.3 3.4-2.2 5.2-5.4 5.2-9.5 0-2.8-.9-5.1-2.7-7-1.8-1.9-5.2-3.6-10.1-5.2L246 52c-7.3-2.3-12.7-5.7-16-10.2-3.3-4.4-5-9.3-5-14.5 0-4.2.9-7.9 2.7-11.1 1.8-3.2 4.2-6 7.2-8.2 3-2.3 6.4-4 10.4-5.2 4-1.2 8.2-1.7 12.6-1.7 2.2 0 4.5.1 6.7.4 2.3.3 4.4.7 6.5 1.1 2 .5 3.9 1 5.7 1.6 1.8.6 3.2 1.2 4.2 1.8 1.4.8 2.4 1.6 3 2.5.6.8.9 1.9.9 3.3v4.7c0 2.1-.8 3.2-2.3 3.2-.8 0-2.1-.4-3.8-1.2-5.7-2.6-12.1-3.9-19.2-3.9-5.7 
0-10.2.9-13.3 2.8-3.1 1.9-4.7 4.8-4.7 8.9 0 2.8 1 5.2 3 7.1 2 1.9 5.7 3.8 11 5.5l14.2 4.5c7.2 2.3 12.4 5.5 15.5 9.6 3.1 4.1 4.6 8.8 4.6 14 0 4.3-.9 8.2-2.6 11.6-1.8 3.4-4.2 6.4-7.3 8.8-3.1 2.5-6.8 4.3-11.1 5.6-4.5 1.4-9.2 2.1-14.3 2.1z"
      fill={!stringToBool(props.isdark) ? "#ffffff" : "#252f3e"}
    />
    <path
      d="M273.5 143.7c-32.9 24.3-80.7 37.2-121.8 37.2-57.6 0-109.5-21.3-148.7-56.7-3.1-2.8-.3-6.6 3.4-4.4 42.4 24.6 94.7 39.5 148.8 39.5 36.5 0 76.6-7.6 113.5-23.2 5.5-2.5 10.2 3.6 4.8 7.6z"
      className="st1"
    />
    <path
      d="M287.2 128.1c-4.2-5.4-27.8-2.6-38.5-1.3-3.2.4-3.7-2.4-.8-4.5 18.8-13.2 49.7-9.4 53.3-5 3.6 4.5-1 35.4-18.6 50.2-2.7 2.3-5.3 1.1-4.1-1.9 4-9.9 12.9-32.2 8.7-37.5z"
      className="st1"
    />
  </svg>
);
export default SvgAWS;

View file

@ -0,0 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 304 182" style="enable-background:new 0 0 304 182;" xml:space="preserve">
<style type="text/css">
.st0{fill:#252F3E;}
.st1{fill-rule:evenodd;clip-rule:evenodd;fill:#FF9900;}
</style>
<g>
<path class="st0" d="M86.4,66.4c0,3.7,0.4,6.7,1.1,8.9c0.8,2.2,1.8,4.6,3.2,7.2c0.5,0.8,0.7,1.6,0.7,2.3c0,1-0.6,2-1.9,3l-6.3,4.2
c-0.9,0.6-1.8,0.9-2.6,0.9c-1,0-2-0.5-3-1.4C76.2,90,75,88.4,74,86.8c-1-1.7-2-3.6-3.1-5.9c-7.8,9.2-17.6,13.8-29.4,13.8
c-8.4,0-15.1-2.4-20-7.2c-4.9-4.8-7.4-11.2-7.4-19.2c0-8.5,3-15.4,9.1-20.6c6.1-5.2,14.2-7.8,24.5-7.8c3.4,0,6.9,0.3,10.6,0.8
c3.7,0.5,7.5,1.3,11.5,2.2v-7.3c0-7.6-1.6-12.9-4.7-16c-3.2-3.1-8.6-4.6-16.3-4.6c-3.5,0-7.1,0.4-10.8,1.3c-3.7,0.9-7.3,2-10.8,3.4
c-1.6,0.7-2.8,1.1-3.5,1.3c-0.7,0.2-1.2,0.3-1.6,0.3c-1.4,0-2.1-1-2.1-3.1v-4.9c0-1.6,0.2-2.8,0.7-3.5c0.5-0.7,1.4-1.4,2.8-2.1
c3.5-1.8,7.7-3.3,12.6-4.5c4.9-1.3,10.1-1.9,15.6-1.9c11.9,0,20.6,2.7,26.2,8.1c5.5,5.4,8.3,13.6,8.3,24.6V66.4z M45.8,81.6
c3.3,0,6.7-0.6,10.3-1.8c3.6-1.2,6.8-3.4,9.5-6.4c1.6-1.9,2.8-4,3.4-6.4c0.6-2.4,1-5.3,1-8.7v-4.2c-2.9-0.7-6-1.3-9.2-1.7
c-3.2-0.4-6.3-0.6-9.4-0.6c-6.7,0-11.6,1.3-14.9,4c-3.3,2.7-4.9,6.5-4.9,11.5c0,4.7,1.2,8.2,3.7,10.6
C37.7,80.4,41.2,81.6,45.8,81.6z M126.1,92.4c-1.8,0-3-0.3-3.8-1c-0.8-0.6-1.5-2-2.1-3.9L96.7,10.2c-0.6-2-0.9-3.3-0.9-4
c0-1.6,0.8-2.5,2.4-2.5h9.8c1.9,0,3.2,0.3,3.9,1c0.8,0.6,1.4,2,2,3.9l16.8,66.2l15.6-66.2c0.5-2,1.1-3.3,1.9-3.9c0.8-0.6,2.2-1,4-1
h8c1.9,0,3.2,0.3,4,1c0.8,0.6,1.5,2,1.9,3.9l15.8,67l17.3-67c0.6-2,1.3-3.3,2-3.9c0.8-0.6,2.1-1,3.9-1h9.3c1.6,0,2.5,0.8,2.5,2.5
c0,0.5-0.1,1-0.2,1.6c-0.1,0.6-0.3,1.4-0.7,2.5l-24.1,77.3c-0.6,2-1.3,3.3-2.1,3.9c-0.8,0.6-2.1,1-3.8,1h-8.6c-1.9,0-3.2-0.3-4-1
c-0.8-0.7-1.5-2-1.9-4L156,23l-15.4,64.4c-0.5,2-1.1,3.3-1.9,4c-0.8,0.7-2.2,1-4,1H126.1z M254.6,95.1c-5.2,0-10.4-0.6-15.4-1.8
c-5-1.2-8.9-2.5-11.5-4c-1.6-0.9-2.7-1.9-3.1-2.8c-0.4-0.9-0.6-1.9-0.6-2.8v-5.1c0-2.1,0.8-3.1,2.3-3.1c0.6,0,1.2,0.1,1.8,0.3
c0.6,0.2,1.5,0.6,2.5,1c3.4,1.5,7.1,2.7,11,3.5c4,0.8,7.9,1.2,11.9,1.2c6.3,0,11.2-1.1,14.6-3.3c3.4-2.2,5.2-5.4,5.2-9.5
c0-2.8-0.9-5.1-2.7-7c-1.8-1.9-5.2-3.6-10.1-5.2L246,52c-7.3-2.3-12.7-5.7-16-10.2c-3.3-4.4-5-9.3-5-14.5c0-4.2,0.9-7.9,2.7-11.1
c1.8-3.2,4.2-6,7.2-8.2c3-2.3,6.4-4,10.4-5.2c4-1.2,8.2-1.7,12.6-1.7c2.2,0,4.5,0.1,6.7,0.4c2.3,0.3,4.4,0.7,6.5,1.1
c2,0.5,3.9,1,5.7,1.6c1.8,0.6,3.2,1.2,4.2,1.8c1.4,0.8,2.4,1.6,3,2.5c0.6,0.8,0.9,1.9,0.9,3.3v4.7c0,2.1-0.8,3.2-2.3,3.2
c-0.8,0-2.1-0.4-3.8-1.2c-5.7-2.6-12.1-3.9-19.2-3.9c-5.7,0-10.2,0.9-13.3,2.8c-3.1,1.9-4.7,4.8-4.7,8.9c0,2.8,1,5.2,3,7.1
c2,1.9,5.7,3.8,11,5.5l14.2,4.5c7.2,2.3,12.4,5.5,15.5,9.6c3.1,4.1,4.6,8.8,4.6,14c0,4.3-0.9,8.2-2.6,11.6
c-1.8,3.4-4.2,6.4-7.3,8.8c-3.1,2.5-6.8,4.3-11.1,5.6C264.4,94.4,259.7,95.1,254.6,95.1z"/>
<g>
<path class="st1" d="M273.5,143.7c-32.9,24.3-80.7,37.2-121.8,37.2c-57.6,0-109.5-21.3-148.7-56.7c-3.1-2.8-0.3-6.6,3.4-4.4
c42.4,24.6,94.7,39.5,148.8,39.5c36.5,0,76.6-7.6,113.5-23.2C274.2,133.6,278.9,139.7,273.5,143.7z"/>
<path class="st1" d="M287.2,128.1c-4.2-5.4-27.8-2.6-38.5-1.3c-3.2,0.4-3.7-2.4-0.8-4.5c18.8-13.2,49.7-9.4,53.3-5
c3.6,4.5-1,35.4-18.6,50.2c-2.7,2.3-5.3,1.1-4.1-1.9C282.5,155.7,291.4,133.4,287.2,128.1z"/>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 3.4 KiB

View file

@ -0,0 +1,11 @@
import { useDarkStore } from "@/stores/darkStore";
import React, { forwardRef } from "react";
import SvgAWS from "./AWS";
/**
 * AWS icon whose wordmark color follows the active theme. The dark-mode
 * flag is stringified because SvgAWS decodes it with stringToBool.
 */
export const AWSInvertedIcon = forwardRef<
  SVGSVGElement,
  React.PropsWithChildren<{}>
>((props, ref) => {
  const isdark = useDarkStore((state) => state.dark).toString();

  return <SvgAWS ref={ref} isdark={isdark} {...props} />;
});

// forwardRef components otherwise show as "Anonymous" in React DevTools.
AWSInvertedIcon.displayName = "AWSInvertedIcon";

View file

@ -0,0 +1,16 @@
// Auto-generated (SVGR-style) Dropbox brand icon. Sized with em units so
// it scales with the surrounding font size; extra props (className,
// onClick, …) are spread onto the root <svg>.
// NOTE(review): not wrapped in forwardRef, so a ref passed by a wrapper
// component is not attached to the <svg>.
const SvgDropbox = (props) => (
  <svg
    width="1em"
    xmlns="http://www.w3.org/2000/svg"
    viewBox="0 0 43 40"
    version="1.1"
    height="1em"
    {...props}
  >
    <path
      d="m12.5 0l-12.5 8.1 8.7 7 12.5-7.8-8.7-7.3zm-12.5 21.9l12.5 8.2 8.7-7.3-12.5-7.7-8.7 6.8zm21.2 0.9l8.8 7.3 12.4-8.1-8.6-6.9-12.6 7.7zm21.2-14.7l-12.4-8.1-8.8 7.3 12.6 7.8 8.6-7zm-21.1 16.3l-8.8 7.3-3.7-2.5v2.8l12.5 7.5 12.5-7.5v-2.8l-3.8 2.5-8.7-7.3z"
      fill="#007EE5"
    />
  </svg>
);
export default SvgDropbox;

View file

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg width="43px" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 43 40" version="1.1" height="40px">
<path d="m12.5 0l-12.5 8.1 8.7 7 12.5-7.8-8.7-7.3zm-12.5 21.9l12.5 8.2 8.7-7.3-12.5-7.7-8.7 6.8zm21.2 0.9l8.8 7.3 12.4-8.1-8.6-6.9-12.6 7.7zm21.2-14.7l-12.4-8.1-8.8 7.3 12.6 7.8 8.6-7zm-21.1 16.3l-8.8 7.3-3.7-2.5v2.8l12.5 7.5 12.5-7.5v-2.8l-3.8 2.5-8.7-7.3z" fill="#007EE5"/>
</svg>

After

Width:  |  Height:  |  Size: 441 B

View file

@ -0,0 +1,9 @@
import React, { forwardRef } from "react";
import SvgDropbox from "./Dropbox";
/** Ref-forwarding wrapper around the generated Dropbox SVG component. */
export const DropboxIcon = forwardRef<
  SVGSVGElement,
  React.PropsWithChildren<{}>
>((props, ref) => {
  return <SvgDropbox ref={ref} {...props} />;
});

// forwardRef components otherwise show as "Anonymous" in React DevTools.
DropboxIcon.displayName = "DropboxIcon";

View file

@ -0,0 +1,35 @@
// Auto-generated (SVGR-style) Google Drive brand icon: six colored path
// segments forming the triangle logo. Sized with em units; extra props
// are spread onto the root <svg>.
// NOTE(review): not wrapped in forwardRef, so a ref passed by a wrapper
// component is not attached to the <svg>.
const SvgGoogleDrive = (props) => (
  <svg
    width="1em"
    height="1em"
    viewBox="0 0 87.3 78"
    xmlns="http://www.w3.org/2000/svg"
    {...props}
  >
    <path
      d="m6.6 66.85 3.85 6.65c.8 1.4 1.95 2.5 3.3 3.3l13.75-23.8h-27.5c0 1.55.4 3.1 1.2 4.5z"
      fill="#0066da"
    />
    <path
      d="m43.65 25-13.75-23.8c-1.35.8-2.5 1.9-3.3 3.3l-25.4 44a9.06 9.06 0 0 0 -1.2 4.5h27.5z"
      fill="#00ac47"
    />
    <path
      d="m73.55 76.8c1.35-.8 2.5-1.9 3.3-3.3l1.6-2.75 7.65-13.25c.8-1.4 1.2-2.95 1.2-4.5h-27.502l5.852 11.5z"
      fill="#ea4335"
    />
    <path
      d="m43.65 25 13.75-23.8c-1.35-.8-2.9-1.2-4.5-1.2h-18.5c-1.6 0-3.15.45-4.5 1.2z"
      fill="#00832d"
    />
    <path
      d="m59.8 53h-32.3l-13.75 23.8c1.35.8 2.9 1.2 4.5 1.2h50.8c1.6 0 3.15-.45 4.5-1.2z"
      fill="#2684fc"
    />
    <path
      d="m73.4 26.5-12.7-22c-.8-1.4-1.95-2.5-3.3-3.3l-13.75 23.8 16.15 28h27.45c0-1.55-.4-3.1-1.2-4.5z"
      fill="#ffba00"
    />
  </svg>
);
export default SvgGoogleDrive;

View file

@ -0,0 +1,8 @@
<svg viewBox="0 0 87.3 78" xmlns="http://www.w3.org/2000/svg">
<path d="m6.6 66.85 3.85 6.65c.8 1.4 1.95 2.5 3.3 3.3l13.75-23.8h-27.5c0 1.55.4 3.1 1.2 4.5z" fill="#0066da"/>
<path d="m43.65 25-13.75-23.8c-1.35.8-2.5 1.9-3.3 3.3l-25.4 44a9.06 9.06 0 0 0 -1.2 4.5h27.5z" fill="#00ac47"/>
<path d="m73.55 76.8c1.35-.8 2.5-1.9 3.3-3.3l1.6-2.75 7.65-13.25c.8-1.4 1.2-2.95 1.2-4.5h-27.502l5.852 11.5z" fill="#ea4335"/>
<path d="m43.65 25 13.75-23.8c-1.35-.8-2.9-1.2-4.5-1.2h-18.5c-1.6 0-3.15.45-4.5 1.2z" fill="#00832d"/>
<path d="m59.8 53h-32.3l-13.75 23.8c1.35.8 2.9 1.2 4.5 1.2h50.8c1.6 0 3.15-.45 4.5-1.2z" fill="#2684fc"/>
<path d="m73.4 26.5-12.7-22c-.8-1.4-1.95-2.5-3.3-3.3l-13.75 23.8 16.15 28h27.45c0-1.55-.4-3.1-1.2-4.5z" fill="#ffba00"/>
</svg>

After

Width:  |  Height:  |  Size: 755 B

View file

@ -0,0 +1,9 @@
import React, { forwardRef } from "react";
import SvgGoogleDrive from "./GoogleDrive";
/** Ref-forwarding wrapper around the generated Google Drive SVG component. */
export const GoogleDriveIcon = forwardRef<
  SVGSVGElement,
  React.PropsWithChildren<{}>
>((props, ref) => {
  return <SvgGoogleDrive ref={ref} {...props} />;
});

// forwardRef components otherwise show as "Anonymous" in React DevTools.
GoogleDriveIcon.displayName = "GoogleDriveIcon";

View file

@ -0,0 +1,30 @@
// Auto-generated (SVGR-style) OneDrive brand icon (cloud in four blue
// shades). Sized with em units; extra props are spread onto the root
// <svg>.
// NOTE(review): not wrapped in forwardRef, so a ref passed by a wrapper
// component is not attached to the <svg>.
const SvgOneDrive = (props) => (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    viewBox="0 5.5 32 20.5"
    width="1em"
    height="1em"
    {...props}
  >
    <title>OfficeCore10_32x_24x_20x_16x_01-22-2019</title>
    <g id="STYLE_COLOR">
      <path
        d="M12.20245,11.19292l.00031-.0011,6.71765,4.02379,4.00293-1.68451.00018.00068A6.4768,6.4768,0,0,1,25.5,13c.14764,0,.29358.0067.43878.01639a10.00075,10.00075,0,0,0-18.041-3.01381C7.932,10.00215,7.9657,10,8,10A7.96073,7.96073,0,0,1,12.20245,11.19292Z"
        fill="#0364b8"
      />
      <path
        d="M12.20276,11.19182l-.00031.0011A7.96073,7.96073,0,0,0,8,10c-.0343,0-.06805.00215-.10223.00258A7.99676,7.99676,0,0,0,1.43732,22.57277l5.924-2.49292,2.63342-1.10819,5.86353-2.46746,3.06213-1.28859Z"
        fill="#0078d4"
      />
      <path
        d="M25.93878,13.01639C25.79358,13.0067,25.64764,13,25.5,13a6.4768,6.4768,0,0,0-2.57648.53178l-.00018-.00068-4.00293,1.68451,1.16077.69528L23.88611,18.19l1.66009.99438,5.67633,3.40007a6.5002,6.5002,0,0,0-5.28375-9.56805Z"
        fill="#1490df"
      />
      <path
        d="M25.5462,19.18437,23.88611,18.19l-3.80493-2.2791-1.16077-.69528L15.85828,16.5042,9.99475,18.97166,7.36133,20.07985l-5.924,2.49292A7.98889,7.98889,0,0,0,8,26H25.5a6.49837,6.49837,0,0,0,5.72253-3.41556Z"
        fill="#28a8ea"
      />
    </g>
  </svg>
);
export default SvgOneDrive;

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 5.5 32 20.5"><title>OfficeCore10_32x_24x_20x_16x_01-22-2019</title><g id="STYLE_COLOR"><path d="M12.20245,11.19292l.00031-.0011,6.71765,4.02379,4.00293-1.68451.00018.00068A6.4768,6.4768,0,0,1,25.5,13c.14764,0,.29358.0067.43878.01639a10.00075,10.00075,0,0,0-18.041-3.01381C7.932,10.00215,7.9657,10,8,10A7.96073,7.96073,0,0,1,12.20245,11.19292Z" fill="#0364b8"/><path d="M12.20276,11.19182l-.00031.0011A7.96073,7.96073,0,0,0,8,10c-.0343,0-.06805.00215-.10223.00258A7.99676,7.99676,0,0,0,1.43732,22.57277l5.924-2.49292,2.63342-1.10819,5.86353-2.46746,3.06213-1.28859Z" fill="#0078d4"/><path d="M25.93878,13.01639C25.79358,13.0067,25.64764,13,25.5,13a6.4768,6.4768,0,0,0-2.57648.53178l-.00018-.00068-4.00293,1.68451,1.16077.69528L23.88611,18.19l1.66009.99438,5.67633,3.40007a6.5002,6.5002,0,0,0-5.28375-9.56805Z" fill="#1490df"/><path d="M25.5462,19.18437,23.88611,18.19l-3.80493-2.2791-1.16077-.69528L15.85828,16.5042,9.99475,18.97166,7.36133,20.07985l-5.924,2.49292A7.98889,7.98889,0,0,0,8,26H25.5a6.49837,6.49837,0,0,0,5.72253-3.41556Z" fill="#28a8ea"/></g></svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

View file

@ -0,0 +1,9 @@
import React, { forwardRef } from "react";
import SvgOneDrive from "./OneDrive";
/** Ref-forwarding wrapper around the generated OneDrive SVG component. */
export const OneDriveIcon = forwardRef<
  SVGSVGElement,
  React.PropsWithChildren<{}>
>((props, ref) => {
  return <SvgOneDrive ref={ref} {...props} />;
});

// forwardRef components otherwise show as "Anonymous" in React DevTools.
OneDriveIcon.displayName = "OneDriveIcon";

View file

@ -21,7 +21,7 @@ export default function IOFileInput({ field, updateValue }: IOFileInputProps) {
const [filePath, setFilePath] = useState("");
const [image, setImage] = useState<string | null>(null);
const setErrorData = useAlertStore((state) => state.setErrorData);
const { validateFileSize } = useFileSizeValidator(setErrorData);
const { validateFileSize } = useFileSizeValidator();
useEffect(() => {
if (filePath) {
@ -78,7 +78,14 @@ export default function IOFileInput({ field, updateValue }: IOFileInputProps) {
const upload = async (file) => {
if (file) {
if (!validateFileSize(file)) {
try {
validateFileSize(file);
} catch (e) {
if (e instanceof Error) {
setErrorData({
title: e.message,
});
}
return;
}
// Check if a file was selected

View file

@ -37,7 +37,7 @@ export default function ChatInput({
const currentFlowId = useFlowsManagerStore((state) => state.currentFlowId);
const fileInputRef = useRef<HTMLInputElement>(null);
const setErrorData = useAlertStore((state) => state.setErrorData);
const { validateFileSize } = useFileSizeValidator(setErrorData);
const { validateFileSize } = useFileSizeValidator();
const stopBuilding = useFlowStore((state) => state.stopBuilding);
const isBuilding = useFlowStore((state) => state.isBuilding);
const chatValue = useUtilityStore((state) => state.chatValueStore);
@ -74,7 +74,14 @@ export default function ChatInput({
if (file) {
const fileExtension = file.name.split(".").pop()?.toLowerCase();
if (!validateFileSize(file)) {
try {
validateFileSize(file);
} catch (e) {
if (e instanceof Error) {
setErrorData({
title: e.message,
});
}
return;
}

View file

@ -8,6 +8,7 @@ import { usePostUploadFile } from "@/controllers/API/queries/files/use-post-uplo
import useAlertStore from "@/stores/alertStore";
import { useUtilityStore } from "@/stores/utilityStore";
import { FilePreviewType } from "@/types/components";
import { formatFileSize } from "@/utils/stringManipulation";
import { useState } from "react";
import ShortUniqueId from "short-unique-id";
@ -23,7 +24,7 @@ export const useFileHandler = (currentFlowId: string) => {
const fileExtension = file.name.split(".").pop()?.toLowerCase();
if (file.size > maxFileSizeUpload) {
setErrorData({
title: INVALID_FILE_SIZE_ALERT(maxFileSizeUpload / 1024 / 1024),
title: INVALID_FILE_SIZE_ALERT(formatFileSize(maxFileSizeUpload)),
});
return;
}

View file

@ -238,7 +238,7 @@ function BaseModal({
const contentClasses = cn(
minWidth,
height,
"flex flex-col flex-1 overflow-hidden",
"flex flex-col flex-1 overflow-hidden max-h-[98dvh]",
className,
);
@ -260,6 +260,7 @@ function BaseModal({
<Dialog open={open} onOpenChange={setOpen}>
{triggerChild}
<DialogContent
onClick={(e) => e.stopPropagation()}
onOpenAutoFocus={(event) => event.preventDefault()}
onEscapeKeyDown={onEscapeKeyDown}
className={contentClasses}

View file

@ -0,0 +1,145 @@
import ShadTooltip from "@/components/common/shadTooltipComponent";
import useUploadFile from "@/hooks/files/use-upload-file";
import useAlertStore from "@/stores/alertStore";
import { useUtilityStore } from "@/stores/utilityStore";
import { formatFileSize } from "@/utils/stringManipulation";
import { useState } from "react";
/**
 * Dashed drop zone that accepts files via drag-and-drop, click, or
 * keyboard activation. Uploads through useUploadFile and reports the
 * uploaded server paths to the parent via `onUpload`.
 *
 * @param onUpload called with the uploaded files' server paths
 * @param types allowed file extensions (first three shown, rest in tooltip)
 * @param isList whether multiple files may be uploaded at once
 */
export default function DragFilesComponent({
  onUpload,
  types,
  isList,
}: {
  onUpload: (filesPaths: string[]) => void;
  types: string[];
  isList: boolean;
}) {
  const [isDragging, setIsDragging] = useState(false);

  const uploadFile = useUploadFile({
    types,
    multiple: isList,
  });

  const maxFileSizeUpload = useUtilityStore((state) => state.maxFileSizeUpload);
  const setErrorData = useAlertStore((state) => state.setErrorData);
  const setSuccessData = useAlertStore((state) => state.setSuccessData);

  // Shared by the drop, click, and keyboard paths: uploads (opening a
  // picker when no files are given), notifies the parent, and surfaces
  // success/error toasts.
  const uploadAndNotify = async (files?: File[]) => {
    try {
      const filesIds = await uploadFile({ files });
      onUpload(filesIds);
      setSuccessData({
        title: `File${filesIds.length > 1 ? "s" : ""} uploaded successfully`,
      });
    } catch (error: any) {
      setErrorData({
        title: "Error uploading file",
        list: [error.message || "An error occurred while uploading the file"],
      });
    }
  };

  // Only highlight the zone when actual files are being dragged.
  const containsFiles = (e: React.DragEvent) =>
    e.dataTransfer.types.some((type) => type === "Files");

  const handleDragOver = (e: React.DragEvent) => {
    e.preventDefault();
    if (containsFiles(e)) {
      setIsDragging(true);
    }
  };

  const handleDragEnter = (e: React.DragEvent) => {
    e.preventDefault();
    if (containsFiles(e)) {
      setIsDragging(true);
    }
  };

  const handleDragLeave = (e: React.DragEvent) => {
    e.preventDefault();
    setIsDragging(false);
  };

  const handleDrop = async (e: React.DragEvent) => {
    e.preventDefault();
    e.stopPropagation();
    setIsDragging(false);

    const droppedFiles = Array.from(e.dataTransfer.files);
    if (droppedFiles.length > 0) {
      await uploadAndNotify(droppedFiles);
    }
  };

  const handleClick = async () => {
    await uploadAndNotify();
  };

  return (
    <div className="flex flex-col items-center justify-center">
      <div
        className={`relative flex h-full w-full cursor-pointer flex-col items-center justify-center gap-2 rounded-2xl p-8 transition-colors ${
          isDragging ? "bg-accent-foreground/10" : ""
        }`}
        onDragOver={handleDragOver}
        onDragEnter={handleDragEnter}
        onDragLeave={handleDragLeave}
        onDrop={handleDrop}
        onClick={handleClick}
        onKeyDown={(e) => {
          // role="button" requires keyboard activation for accessibility.
          if (e.key === "Enter" || e.key === " ") {
            e.preventDefault();
            handleClick();
          }
        }}
        data-testid="drag-files-component"
        role="button"
        tabIndex={0}
      >
        <h3 className="text-sm font-semibold">
          {isDragging ? "Drop files here" : "Click or drag files here"}
        </h3>
        <p className="flex items-center gap-1 text-xs text-muted-foreground">
          <span>{types.slice(0, 3).join(", ")}</span>
          {types.length > 3 && (
            <ShadTooltip content={types.slice(3).join(", ")}>
              <span className="text-accent-pink-foreground underline">
                +{types.length - 3} more
              </span>
            </ShadTooltip>
          )}
          <span className="font-semibold">
            {formatFileSize(maxFileSizeUpload)}
          </span>
          <span>max</span>
        </p>
        {/* Decorative dashed border drawn as SVG so the dash pattern and
            corner radius stay crisp at any size. */}
        <div className="pointer-events-none absolute inset-0 h-full w-full">
          <svg
            width="100%"
            height="100%"
            className="overflow-visible stroke-muted-foreground/50"
            style={{
              position: "absolute",
              top: 1,
              left: 1,
              right: 0,
              bottom: 0,
            }}
          >
            <rect
              width="99.5%"
              height="99.5%"
              fill="none"
              rx="16"
              ry="16"
              strokeWidth="1"
              strokeDasharray="5,5"
              strokeDashoffset="0"
              strokeLinecap="butt"
            />
          </svg>
        </div>
      </div>
    </div>
  );
}

View file

@ -0,0 +1,166 @@
import ForwardedIconComponent from "@/components/common/genericIconComponent";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { useGetDownloadFileV2 } from "@/controllers/API/queries/file-management";
import { useDeleteFileV2 } from "@/controllers/API/queries/file-management/use-delete-file";
import { useDuplicateFileV2 } from "@/controllers/API/queries/file-management/use-duplicate-file";
import ConfirmationModal from "@/modals/confirmationModal";
import useAlertStore from "@/stores/alertStore";
import { FileType } from "@/types/file_management";
import { ReactNode, useState } from "react";
/**
 * Dropdown menu with per-file actions (rename, download, duplicate,
 * delete/remove) plus a delete confirmation dialog.
 *
 * @param children the element that triggers the menu
 * @param file the file the actions apply to
 * @param handleRename callback invoked with the file's id and current name
 * @param simplified hides the "Duplicate" action when true
 */
export default function FilesContextMenuComponent({
  children,
  file,
  handleRename,
  simplified,
}: {
  children: ReactNode;
  file: FileType;
  handleRename: (id: string, name: string) => void;
  simplified?: boolean;
}) {
  // Files without a provider are stored locally and can be truly deleted;
  // provider-backed files are only removed from the list.
  const isLocal = file.provider == null;
  const [showDeleteConfirmation, setShowDeleteConfirmation] = useState(false);
  const setSuccessData = useAlertStore((state) => state.setSuccessData);

  const { mutate: downloadFile } = useGetDownloadFileV2({
    id: file.id,
    filename: file.name,
    type: file.path.split(".").pop() || "",
  });
  const { mutate: deleteFile } = useDeleteFileV2({
    id: file.id,
  });
  const { mutate: duplicateFile } = useDuplicateFileV2({
    id: file.id,
    filename: file.name,
    type: file.path.split(".").pop() || "",
  });

  const handleSelectOptionsChange = (option: string) => {
    switch (option) {
      case "rename":
        handleRename(file.id, file.name);
        break;
      case "replace":
        // TODO(review): "replace" is not implemented and no menu item
        // triggers it yet.
        break;
      case "download":
        downloadFile();
        break;
      case "delete":
        setShowDeleteConfirmation(true);
        break;
      case "duplicate":
        duplicateFile();
        break;
    }
  };

  return (
    <>
      <DropdownMenu>
        <DropdownMenuTrigger asChild>{children}</DropdownMenuTrigger>
        <DropdownMenuContent sideOffset={0} side="bottom" className="-ml-24">
          <DropdownMenuItem
            onClick={(e) => {
              e.stopPropagation();
              handleSelectOptionsChange("rename");
            }}
            className="cursor-pointer"
            data-testid="btn-rename-file"
          >
            <ForwardedIconComponent
              name="SquarePen"
              aria-hidden="true"
              className="mr-2 h-4 w-4"
            />
            Rename
          </DropdownMenuItem>
          <DropdownMenuItem
            onClick={(e) => {
              e.stopPropagation();
              handleSelectOptionsChange("download");
            }}
            className="cursor-pointer"
            data-testid="btn-download-json"
          >
            <ForwardedIconComponent
              name="Download"
              aria-hidden="true"
              className="mr-2 h-4 w-4"
            />
            Download
          </DropdownMenuItem>
          {!simplified && (
            <DropdownMenuItem
              onClick={(e) => {
                e.stopPropagation();
                handleSelectOptionsChange("duplicate");
              }}
              className="cursor-pointer"
              data-testid="btn-duplicate-flow"
            >
              <ForwardedIconComponent
                name="CopyPlus"
                aria-hidden="true"
                className="mr-2 h-4 w-4"
              />
              Duplicate
            </DropdownMenuItem>
          )}
          <DropdownMenuItem
            onClick={(e) => {
              e.stopPropagation();
              handleSelectOptionsChange("delete");
            }}
            className="cursor-pointer text-destructive"
            data-testid="btn-delete-file"
          >
            <ForwardedIconComponent
              name={isLocal ? "Trash2" : "ListX"}
              aria-hidden="true"
              className="mr-2 h-4 w-4"
            />
            {isLocal ? "Delete" : "Remove"}
          </DropdownMenuItem>
        </DropdownMenuContent>
      </DropdownMenu>
      <ConfirmationModal
        open={showDeleteConfirmation}
        onClose={() => setShowDeleteConfirmation(false)}
        onCancel={() => setShowDeleteConfirmation(false)}
        title={isLocal ? "Delete File" : "Remove File"}
        titleHeader={`Are you sure you want to ${isLocal ? "delete" : "remove"} "${file.name}"?`}
        cancelText="Cancel"
        size="x-small"
        confirmationText={isLocal ? "Delete" : "Remove"}
        icon={isLocal ? "Trash2" : "ListX"}
        destructive
        onConfirm={() => {
          // Only report success once the server has actually deleted the
          // file — previously the toast fired immediately, even when the
          // mutation later failed.
          deleteFile(undefined, {
            onSuccess: () => {
              setSuccessData({
                title: "The file has been deleted successfully",
              });
            },
          });
          setShowDeleteConfirmation(false);
        }}
      >
        <ConfirmationModal.Content>
          <div className="text-sm text-muted-foreground">
            {isLocal
              ? "This action cannot be undone. The file will be permanently deleted."
              : "This will remove the file from your list. You can add it back later if needed."}
          </div>
        </ConfirmationModal.Content>
      </ConfirmationModal>
    </>
  );
}

View file

@ -0,0 +1,204 @@
import ForwardedIconComponent from "@/components/common/genericIconComponent";
import ShadTooltip from "@/components/common/shadTooltipComponent";
import { Button } from "@/components/ui/button";
import { Checkbox } from "@/components/ui/checkbox";
import { Input } from "@/components/ui/input";
import { usePostUploadFileV2 } from "@/controllers/API/queries/file-management";
import { FileType } from "@/types/file_management";
import { formatFileSize } from "@/utils/stringManipulation";
import { FILE_ICONS } from "@/utils/styleUtils";
import { cn } from "@/utils/utils";
import { useEffect, useState } from "react";
import FilesContextMenuComponent from "../../../filesContextMenuComponent";
// Renders a single file row: type icon (or upload progress %), name with an
// optional inline rename input, formatted size, and a trailing action that is
// either a remove ("X") button or a context menu, depending on which
// callbacks the parent supplies.
export default function FileRendererComponent({
  file,
  handleFileSelect,
  selectedFiles,
  handleRemove,
  handleRename,
  index,
}: {
  // File metadata; `progress` is defined while uploading and -1 on failure.
  file: FileType;
  // When provided, the row becomes selectable (checkbox shown, click toggles).
  handleFileSelect?: (path: string) => void;
  // Paths of currently selected files; drives the checkbox checked state.
  selectedFiles?: string[];
  // When provided, shows an "X" remove button instead of the context menu.
  handleRemove?: (path: string) => void;
  // When provided, enables inline rename (via context menu or double-click).
  handleRename?: (id: string, name: string) => void;
  index: number;
}) {
  // Extension taken as the last dot-separated segment of the path;
  // falls back to "" for paths without a dot.
  const type = file.path.split(".").pop() ?? "";
  const [openRename, setOpenRename] = useState(false);
  const [newName, setNewName] = useState(file.name);

  // Only open the inline rename input when a rename callback was supplied.
  const handleOpenRename = () => {
    handleRename && setOpenRename(true);
  };

  // Used by the "try again?" link to retry a failed upload.
  const { mutate: uploadFile } = usePostUploadFileV2();

  // Re-seed the input with the current name each time the rename input is
  // toggled, so a cancelled edit does not leak into the next one.
  useEffect(() => {
    setNewName(file.name);
  }, [openRename]);

  return (
    <div
      key={index}
      className={cn(
        "flex w-full items-center justify-between gap-2 overflow-hidden rounded-lg py-2",
        handleFileSelect ? "cursor-pointer px-3 hover:bg-accent" : "",
      )}
      onClick={() => {
        // NOTE(review): `!file.progress` also passes when progress === 0;
        // confirm a just-started upload should be selectable.
        if (!file.progress) handleFileSelect?.(file.path);
      }}
    >
      <div className="flex w-full items-center gap-4 overflow-hidden">
        {handleFileSelect && (
          <div
            className={cn(
              "flex shrink-0",
              file.progress !== undefined &&
                "pointer-events-none cursor-not-allowed",
            )}
            onClick={(e) => e.stopPropagation()}
          >
            <Checkbox
              data-testid={`checkbox-${file.name}`}
              checked={selectedFiles?.includes(file.path)}
              onCheckedChange={() => handleFileSelect?.(file.path)}
            />
          </div>
        )}
        <div className="flex w-full items-center gap-2 overflow-hidden">
          {file.progress !== undefined && file.progress !== -1 ? (
            <div className="flex h-6 items-center justify-center text-xs font-semibold text-muted-foreground">
              {Math.round(file.progress * 100)}%
            </div>
          ) : (
            <ForwardedIconComponent
              name={FILE_ICONS[type]?.icon ?? "File"}
              className={cn(
                "h-6 w-6 shrink-0",
                file.progress !== undefined
                  ? "text-placeholder-foreground"
                  : (FILE_ICONS[type]?.color ?? undefined),
              )}
            />
          )}
          {openRename ? (
            <div className="w-full">
              <Input
                value={newName}
                autoFocus
                onChange={(e) => setNewName(e.target.value)}
                onBlur={() => {
                  setOpenRename(false);
                  handleRename?.(file.id, newName);
                }}
                onKeyDown={(e) => {
                  if (e.key === "Enter") {
                    setOpenRename(false);
                    handleRename?.(file.id, newName);
                  }
                }}
                onClick={(e) => e.stopPropagation()}
                className="h-6 py-1"
                data-testid={`rename-input-${file.name}`}
              />
            </div>
          ) : (
            <span
              className={cn(
                "flex flex-1 items-center gap-2 overflow-hidden text-sm font-medium",
                file.progress !== undefined &&
                  file.progress === -1 &&
                  "pointer-events-none text-placeholder-foreground",
              )}
              onDoubleClick={(e) => {
                e.stopPropagation();
                if (!file.progress && !handleRemove) {
                  setOpenRename(true);
                }
              }}
            >
              <ShadTooltip content={`${file.name}.${type}`} side="bottom">
                <span
                  className={cn(
                    "w-full cursor-text overflow-hidden truncate",
                    handleRemove && "cursor-default",
                  )}
                >
                  {file.name}.{type}
                </span>
              </ShadTooltip>
              <span className="shrink-0 cursor-default text-xs font-normal text-muted-foreground">
                {formatFileSize(file.size)}
              </span>
            </span>
          )}
          {file.progress !== undefined && file.progress === -1 ? (
            <span className="text-[13px] text-primary">
              Upload failed,{" "}
              <span
                className="cursor-pointer text-accent-pink-foreground underline"
                onClick={(e) => {
                  e.stopPropagation();
                  if (file.file) {
                    uploadFile({ file: file.file });
                  }
                }}
              >
                try again?
              </span>
            </span>
          ) : (
            <></>
          )}
        </div>
      </div>
      <div className="flex shrink-0 items-center gap-2">
        {handleRemove ? (
          <Button
            size="iconMd"
            variant="ghost"
            className="hover:bg-accent"
            data-testid={`remove-file-button-${file.name}`}
            onClick={(e) => {
              e.stopPropagation();
              handleRemove?.(file.path);
            }}
          >
            <ForwardedIconComponent
              name="X"
              className="h-5 w-5 shrink-0 text-muted-foreground"
            />
          </Button>
        ) : file.progress === undefined ? (
          <FilesContextMenuComponent
            handleRename={handleOpenRename}
            file={file}
            simplified
          >
            <Button
              size="iconMd"
              data-testid={`context-menu-button-${file.name}`}
              variant="ghost"
              className="hover:bg-secondary-foreground/5"
              onClick={(e) => {
                e.stopPropagation();
              }}
            >
              <ForwardedIconComponent
                name="EllipsisVertical"
                className="h-5 w-5 shrink-0"
              />
            </Button>
          </FilesContextMenuComponent>
        ) : (
          <></>
        )}
      </div>
    </div>
  );
}

View file

@ -0,0 +1,29 @@
import { FileType } from "@/types/file_management";
import FileRendererComponent from "./components/fileRendererComponent";
/**
 * Renders a list of files, delegating each entry to FileRendererComponent.
 * All selection/removal/rename callbacks are passed straight through.
 */
export default function FilesRendererComponent({
  files,
  handleFileSelect,
  selectedFiles,
  handleRemove,
  handleRename,
}: {
  files: FileType[];
  isSearch?: boolean;
  handleFileSelect?: (name: string) => void;
  selectedFiles?: string[];
  handleRemove?: (name: string) => void;
  handleRename?: (id: string, name: string) => void;
}) {
  return files.map((file, index) => (
    <FileRendererComponent
      // Prefer the stable file id over the array index so rows keep their
      // state (e.g. an open rename input) when the list is re-sorted or
      // filtered; fall back to the index for entries without an id yet.
      key={file.id ?? index}
      file={file}
      handleFileSelect={handleFileSelect}
      selectedFiles={selectedFiles}
      handleRemove={handleRemove}
      handleRename={handleRename}
      index={index}
    />
  ));
}

View file

@ -0,0 +1,35 @@
import { MorphingMenu } from "@/components/ui/morphing-menu";
/**
 * "Import from..." morphing menu listing the supported cloud sources
 * (Google Drive, OneDrive, S3). The per-source click handlers are
 * placeholders until the import flows are implemented.
 */
export default function ImportButtonComponent({
  variant = "large",
}: {
  variant?: "large" | "small";
}) {
  const sources: { icon: string; label: string }[] = [
    { icon: "GoogleDrive", label: "Drive" },
    { icon: "OneDrive", label: "OneDrive" },
    { icon: "AWSInverted", label: "S3 Bucket" },
  ];

  const items = sources.map(({ icon, label }) => ({
    icon,
    label,
    onClick: () => {
      // Import flow for this source is not implemented yet.
    },
  }));

  return (
    <MorphingMenu variant={variant} trigger="Import from..." items={items} />
  );
}

View file

@ -0,0 +1,128 @@
import { Input } from "@/components/ui/input";
import { usePostRenameFileV2 } from "@/controllers/API/queries/file-management/use-put-rename-file";
import { CustomLink } from "@/customization/components/custom-link";
import { sortByBoolean, sortByDate } from "@/pages/MainPage/utils/sort-flows";
import { FileType } from "@/types/file_management";
import Fuse from "fuse.js";
import { useEffect, useMemo, useState } from "react";
import FilesRendererComponent from "../filesRendererComponent";
/**
 * Searchable "recent files" list shown inside the file manager modal.
 * Supports fuzzy search (Fuse.js over name/type), filtering by accepted
 * extensions, selection (single or multi via `isList`), and inline rename.
 */
export default function RecentFilesComponent({
  files,
  selectedFiles,
  setSelectedFiles,
  types,
  isList,
}: {
  selectedFiles: string[];
  files: FileType[];
  setSelectedFiles: (files: string[]) => void;
  types: string[];
  isList: boolean;
}) {
  // Memoized: previously this array was rebuilt on every render and fed to a
  // useEffect dependency list, which re-created the Fuse index (a setState in
  // an effect) on each render cycle.
  const filesWithType = useMemo(
    () =>
      files.map((file) => ({
        ...file,
        type: file.path.split(".").pop()?.toLowerCase(),
      })),
    [files],
  );

  const [searchQuery, setSearchQuery] = useState("");

  const { mutate: renameFile } = usePostRenameFileV2();

  // Rebuild the fuzzy-search index only when the file list actually changes.
  const fuse = useMemo(
    () =>
      new Fuse(filesWithType, {
        keys: ["name", "type"],
        threshold: 0.3,
      }),
    [filesWithType],
  );

  const searchResults = useMemo(() => {
    const matched = searchQuery
      ? fuse.search(searchQuery).map(({ item }) => item)
      : filesWithType;
    // Keep only files whose extension is accepted by the caller.
    return matched.filter((file) => {
      const fileExtension = file.path.split(".").pop()?.toLowerCase();
      return fileExtension && (!types || types.includes(fileExtension));
    });
  }, [searchQuery, fuse, filesWithType, types]);

  // Toggle selection; in single-select mode the new path replaces the
  // previous selection instead of being appended.
  const handleFileSelect = (filePath: string) => {
    setSelectedFiles(
      selectedFiles.includes(filePath)
        ? selectedFiles.filter((path) => path !== filePath)
        : isList
          ? [...selectedFiles, filePath]
          : [filePath],
    );
  };

  const handleRename = (id: string, name: string) => {
    renameFile({ id, name });
  };

  return (
    <div className="flex flex-col gap-4 overflow-hidden">
      <div className="flex items-center justify-between">
        <div className="flex-1">
          <Input
            icon="Search"
            placeholder="Search files..."
            inputClassName="h-8"
            data-testid="search-files-input"
            value={searchQuery}
            onChange={(e) => setSearchQuery(e.target.value)}
          />
        </div>
        {/* <div className="flex w-48 justify-end">
              <ImportButtonComponent variant="small" />
            </div> */}
      </div>
      <div
        className={`flex h-80 min-h-80 flex-col gap-1 overflow-y-auto overflow-x-hidden`}
      >
        {searchResults.length > 0 ? (
          <FilesRendererComponent
            // Selected and in-progress files float to the top; within each
            // group, newest first. Only the first 10 rows are shown.
            files={searchResults
              .toSorted((a, b) => {
                const selectedOrder = sortByBoolean(
                  selectedFiles.includes(a.path) || a.progress !== undefined,
                  selectedFiles.includes(b.path) || b.progress !== undefined,
                );
                return selectedOrder === 0
                  ? sortByDate(
                      a.updated_at ?? a.created_at,
                      b.updated_at ?? b.created_at,
                    )
                  : selectedOrder;
              })
              .slice(0, 10)}
            handleFileSelect={handleFileSelect}
            selectedFiles={selectedFiles}
            handleRename={handleRename}
          />
        ) : (
          <div className="flex h-full w-full items-center justify-center text-sm">
            <span>
              {searchQuery !== ""
                ? "No files found, try again "
                : "Upload or import files, "}
              or visit{" "}
              <CustomLink
                className="text-accent-pink-foreground underline"
                to="/files"
              >
                My Files.
              </CustomLink>
            </span>
          </div>
        )}
      </div>
    </div>
  );
}

View file

@ -0,0 +1,113 @@
import useAlertStore from "@/stores/alertStore";
import { FileType } from "@/types/file_management";
import { useQueryClient } from "@tanstack/react-query";
import { ReactNode, useEffect, useState } from "react";
import { ForwardedIconComponent } from "../../components/common/genericIconComponent";
import BaseModal from "../baseModal";
import DragFilesComponent from "./components/dragFilesComponent";
import RecentFilesComponent from "./components/recentFilesComponent";
/**
 * Modal for picking files: a drag & drop upload zone plus a searchable list
 * of recent files. Selection is staged internally and only committed to the
 * caller via `handleSubmit` when the user confirms.
 */
export default function FileManagerModal({
  children,
  handleSubmit,
  selectedFiles,
  disabled,
  files,
  types,
  isList,
}: {
  children?: ReactNode;
  selectedFiles?: string[];
  open?: boolean;
  handleSubmit: (files: string[]) => void;
  setOpen?: (open: boolean) => void;
  disabled?: boolean;
  files: FileType[];
  types: string[];
  isList?: boolean;
}): JSX.Element {
  const [internalOpen, internalSetOpen] = useState(false);
  const setErrorData = useAlertStore((state) => state.setErrorData);

  const queryClient = useQueryClient();

  // Refresh the file list when the modal opens — not on close or on mount —
  // so files uploaded elsewhere in the app are visible.
  useEffect(() => {
    if (internalOpen) {
      queryClient.refetchQueries({
        queryKey: ["useGetFilesV2"],
      });
    }
  }, [internalOpen]);

  const [internalSelectedFiles, setInternalSelectedFiles] = useState<string[]>(
    selectedFiles || [],
  );

  // Re-sync the staged selection with the caller's selection whenever the
  // modal is toggled, discarding uncommitted changes.
  useEffect(() => {
    setInternalSelectedFiles(selectedFiles || []);
  }, [internalOpen]);

  // After a successful upload: multi-select appends, single-select replaces.
  // Empty uploads are ignored so we never stage `[undefined]`.
  const handleUpload = (filesPaths: string[]) => {
    if (filesPaths.length === 0) return;
    setInternalSelectedFiles(
      isList ? [...internalSelectedFiles, ...filesPaths] : [filesPaths[0]],
    );
  };

  return (
    <>
      <BaseModal
        size="smaller-h-full"
        open={!disabled && internalOpen}
        setOpen={internalSetOpen}
        onSubmit={() => {
          if (internalSelectedFiles.length === 0) {
            setErrorData({
              title: "Please select at least one file",
            });
            return;
          }
          handleSubmit(internalSelectedFiles);
          internalSetOpen(false);
        }}
      >
        <BaseModal.Trigger asChild>
          {children ? children : <></>}
        </BaseModal.Trigger>
        <BaseModal.Header description={null}>
          <span className="flex items-center gap-2 font-medium">
            <div className="rounded-md bg-muted p-1.5">
              <ForwardedIconComponent name="File" className="h-5 w-5" />
            </div>
            My Files
          </span>
        </BaseModal.Header>
        <BaseModal.Content overflowHidden>
          <div className="flex flex-col gap-4 overflow-hidden">
            <div className="flex shrink-0 flex-col">
              <DragFilesComponent
                onUpload={handleUpload}
                types={types}
                isList={isList ?? false}
              />
            </div>
            <div className="flex flex-1 flex-col overflow-hidden">
              <RecentFilesComponent
                files={files}
                selectedFiles={internalSelectedFiles}
                setSelectedFiles={setInternalSelectedFiles}
                types={types}
                isList={isList ?? false}
              />
            </div>
          </div>
        </BaseModal.Content>
        <BaseModal.Footer
          submit={{
            label: `Select files`,
            dataTestId: "select-files-modal-button",
          }}
        ></BaseModal.Footer>
      </BaseModal>
    </>
  );
}

View file

@ -26,7 +26,11 @@ const useFileDrop = (type?: string) => {
uploadFlow({
files,
isComponent:
type === "component" ? true : type === "flow" ? false : undefined,
type === "components"
? true
: type === "flows"
? false
: undefined,
})
.then(() => {
setSuccessData({

View file

@ -1,7 +1,9 @@
import LangflowLogo from "@/assets/LangflowLogo.svg?react";
import ForwardedIconComponent from "@/components/common/genericIconComponent";
import CardsWrapComponent from "@/components/core/cardsWrapComponent";
import { Button } from "@/components/ui/button";
import { useFolderStore } from "@/stores/foldersStore";
import useFileDrop from "../../hooks/use-on-file-drop";
type EmptyPageProps = {
setOpenModal: (open: boolean) => void;
@ -9,69 +11,79 @@ type EmptyPageProps = {
export const EmptyPage = ({ setOpenModal }: EmptyPageProps) => {
const folders = useFolderStore((state) => state.folders);
const handleFileDrop = useFileDrop(undefined);
return (
<div className="m-0 h-full w-full bg-secondary p-0">
<div className="text-container">
<div className="relative z-20 flex w-full flex-col items-center justify-center gap-2">
<LangflowLogo className="h-7 w-8" />
<h3
className="pt-5 font-chivo text-2xl font-semibold text-foreground"
data-testid="mainpage_title"
>
{folders?.length > 1 ? "Empty folder" : "Start building"}
</h3>
<p className="pb-5 text-sm text-secondary-foreground">
Begin with a template, or start from scratch.
</p>
<Button
variant="default"
onClick={() => setOpenModal(true)}
id="new-project-btn"
>
<ForwardedIconComponent
name="Plus"
aria-hidden="true"
className="h-4 w-4"
/>
<span className="hidden whitespace-nowrap font-semibold md:inline">
New Flow
</span>
</Button>
<CardsWrapComponent
dragMessage={`Drop your flows or components here`}
onFileDrop={handleFileDrop}
>
<div className="m-0 h-full w-full bg-secondary p-0">
<div className="text-container">
<div className="relative z-20 flex w-full flex-col items-center justify-center gap-2">
<LangflowLogo className="h-7 w-8" />
<h3
className="pt-5 font-chivo text-2xl font-semibold text-foreground"
data-testid="mainpage_title"
>
{folders?.length > 1 ? "Empty folder" : "Start building"}
</h3>
<p className="pb-5 text-sm text-secondary-foreground">
Begin with a template, or start from scratch.
</p>
<Button
variant="default"
onClick={() => setOpenModal(true)}
id="new-project-btn"
>
<ForwardedIconComponent
name="Plus"
aria-hidden="true"
className="h-4 w-4"
/>
<span className="hidden whitespace-nowrap font-semibold md:inline">
New Flow
</span>
</Button>
</div>
</div>
<div className="gradient-bg">
<svg xmlns="http://www.w3.org/2000/svg" width="100%" height="100%">
<defs>
<filter id="lf-balls">
<feGaussianBlur
in="turbulence"
stdDeviation="10"
result="blur"
/>
<feColorMatrix
in="blur"
type="matrix"
values="1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 18 -8"
result="color-matrix"
/>
<feBlend in="SourceGraphic" in2="color-matrix" mode="normal" />
</filter>
<filter id="lf-noise">
<feTurbulence
type="fractalNoise"
baseFrequency="0.65"
stitchTiles="stitch"
/>
</filter>
</defs>
</svg>
<div className="gradients-container">
<div className="g1" />
<div className="g2" />
<div className="g3" />
<div className="g4" />
<div className="g5" />
<div className="g6" />
</div>
</div>
</div>
<div className="gradient-bg">
<svg xmlns="http://www.w3.org/2000/svg" width="100%" height="100%">
<defs>
<filter id="lf-balls">
<feGaussianBlur in="turbulence" stdDeviation="10" result="blur" />
<feColorMatrix
in="blur"
type="matrix"
values="1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 18 -8"
result="color-matrix"
/>
<feBlend in="SourceGraphic" in2="color-matrix" mode="normal" />
</filter>
<filter id="lf-noise">
<feTurbulence
type="fractalNoise"
baseFrequency="0.65"
stitchTiles="stitch"
/>
</filter>
</defs>
</svg>
<div className="gradients-container">
<div className="g1" />
<div className="g2" />
<div className="g3" />
<div className="g4" />
<div className="g5" />
<div className="g6" />
</div>
</div>
</div>
</CardsWrapComponent>
);
};

View file

@ -0,0 +1,120 @@
import useFlowsManagerStore from "@/stores/flowsManagerStore";
import { cn } from "@/utils/utils";
import { useEffect, useState } from "react";
/**
 * Wraps arbitrary content and overlays a dashed drop-target border while the
 * user drags files over it, plus a floating "Drop file(s) to upload" badge
 * that follows the cursor. Drag handling is inert when no `onFileDrop`
 * callback is supplied or while the IO modal is open.
 */
export default function DragWrapComponent({
  onFileDrop,
  children,
}: {
  onFileDrop?: (e: any) => void;
  children: JSX.Element | JSX.Element[];
}) {
  const [isDragging, setIsDragging] = useState(false);
  const [mousePosition, setMousePosition] = useState({ x: 0, y: 0 });
  const isIOModalOpen = useFlowsManagerStore((state) => state.IOModalOpen);
  const [filesCount, setFilesCount] = useState(0);

  // If the user switches tabs mid-drag we never receive a dragleave, so
  // clear the highlight when the tab becomes visible again.
  useEffect(() => {
    const resetOnVisible = () => {
      if (document.visibilityState === "visible") {
        setIsDragging(false);
      }
    };
    document.addEventListener("visibilitychange", resetOnVisible);
    return () =>
      document.removeEventListener("visibilitychange", resetOnVisible);
  }, []);

  // A drag is accepted only when it carries files, a drop handler exists,
  // and the IO modal is closed.
  const acceptsDrag = (e) =>
    e.dataTransfer.types.some((t) => t === "Files") &&
    onFileDrop &&
    !isIOModalOpen;

  const handleDragOver = (e) => {
    e.preventDefault();
    // Track the cursor so the floating badge can follow it.
    setMousePosition({ x: e.clientX, y: e.clientY });
    if (acceptsDrag(e)) {
      setIsDragging(true);
      setFilesCount(e.dataTransfer.items.length);
    }
  };

  const handleDragEnter = (e) => {
    if (acceptsDrag(e)) {
      setIsDragging(true);
      setFilesCount(e.dataTransfer.items.length);
    }
    e.preventDefault();
  };

  const handleDragLeave = (e) => {
    e.preventDefault();
    if (onFileDrop && !isIOModalOpen) {
      setIsDragging(false);
    }
  };

  const handleDrop = (e) => {
    e.preventDefault();
    if (onFileDrop && !isIOModalOpen) onFileDrop(e);
    setIsDragging(false);
  };

  // Dashed rounded-rectangle border, rendered as an inline SVG used as a
  // CSS mask over the highlight overlay.
  const image = `url("data:image/svg+xml,%3Csvg width='100%25' height='100%25' xmlns='http://www.w3.org/2000/svg'%3E%3Crect width='100%25' height='100%25' fill='none' rx='16' ry='16' stroke='%23FFFFFF' stroke-width='2px' stroke-dasharray='5%2c 5' stroke-dashoffset='0' stroke-linecap='butt'/%3E%3C/svg%3E")`;

  return (
    <div
      onDragOver={handleDragOver}
      onDragEnter={handleDragEnter}
      onDragLeave={handleDragLeave}
      onDrop={handleDrop}
      className={cn("relative h-full w-full transition-all")}
      data-testid="drag-wrap-component"
    >
      <div
        className={cn(
          "h-full w-full transition-all",
          isDragging ? "opacity-50" : "",
        )}
      >
        {children}
      </div>
      <div
        className={cn(
          "pointer-events-none absolute top-0 h-full w-full rounded-2xl bg-placeholder-foreground transition-all",
          isDragging ? "opacity-100" : "opacity-0",
        )}
        style={{
          WebkitMaskImage: image,
          maskImage: image,
        }}
      />
      {isDragging && (
        <div
          className="pointer-events-none fixed -translate-x-1/2"
          style={{
            left: `${mousePosition.x}px`,
            top: `${mousePosition.y + 55}px`,
          }}
        >
          <div className="w-44 rounded-2xl bg-accent-indigo-foreground px-2.5 py-0.5 text-center backdrop-blur-sm">
            <span className="font-mono text-xs text-primary-foreground">
              Drop file{filesCount > 1 ? "s" : ""} to upload
            </span>
          </div>
        </div>
      )}
    </div>
  );
}

View file

@ -0,0 +1,340 @@
import ForwardedIconComponent from "@/components/common/genericIconComponent";
import ShadTooltip from "@/components/common/shadTooltipComponent";
import CardsWrapComponent from "@/components/core/cardsWrapComponent";
import TableComponent from "@/components/core/parameterRenderComponent/components/tableComponent";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import Loading from "@/components/ui/loading";
import { SidebarTrigger } from "@/components/ui/sidebar";
import {
useGetFilesV2,
usePostUploadFileV2,
} from "@/controllers/API/queries/file-management";
import { usePostRenameFileV2 } from "@/controllers/API/queries/file-management/use-put-rename-file";
import useUploadFile from "@/hooks/files/use-upload-file";
import FilesContextMenuComponent from "@/modals/fileManagerModal/components/filesContextMenuComponent";
import useAlertStore from "@/stores/alertStore";
import { formatFileSize } from "@/utils/stringManipulation";
import { FILE_ICONS } from "@/utils/styleUtils";
import { cn } from "@/utils/utils";
import { ColDef, NewValueParams } from "ag-grid-community";
import { AgGridReact } from "ag-grid-react";
import { useMemo, useRef, useState } from "react";
import { sortByDate } from "../../utils/sort-flows";
import DragWrapComponent from "./components/dragWrapComponent";
/**
 * "My Files" page: an AG Grid table of the user's files with search,
 * drag & drop upload, inline rename, and a per-row context menu.
 */
export const FilesPage = () => {
  const tableRef = useRef<AgGridReact<any>>(null);
  const { data: files } = useGetFilesV2();
  const setErrorData = useAlertStore((state) => state.setErrorData);
  const setSuccessData = useAlertStore((state) => state.setSuccessData);

  const { mutate: rename } = usePostRenameFileV2();

  // Persist an inline rename committed through the grid's cell editor.
  const handleRename = (params: NewValueParams<any, any>) => {
    rename({
      id: params.data.id,
      name: params.newValue,
    });
  };

  // Put the "name" cell of the row with this id into edit mode. The `name`
  // argument is unused but kept so the signature matches the context menu's
  // `handleRename` callback.
  const handleOpenRename = (id: string, name: string) => {
    if (tableRef.current) {
      tableRef.current.api.startEditingCell({
        rowIndex: files?.findIndex((file) => file.id === id) ?? 0,
        colKey: "name",
      });
    }
  };

  const uploadFile = useUploadFile({ multiple: true });

  // Upload the given files (or open the file picker when called without
  // arguments) and report the outcome through the alert store.
  const handleUpload = async (files?: File[]) => {
    try {
      const filesIds = await uploadFile({
        files: files,
      });
      setSuccessData({
        title: `File${filesIds.length > 1 ? "s" : ""} uploaded successfully`,
      });
    } catch (error: any) {
      setErrorData({
        title: "Error uploading file",
        list: [error.message || "An error occurred while uploading the file"],
      });
    }
  };

  // Used by the "try again?" link on rows whose upload failed.
  const { mutate: uploadFileDirect } = usePostUploadFileV2();

  const colDefs: ColDef[] = [
    {
      headerName: "Name",
      field: "name",
      flex: 2, // twice as wide as the other columns
      editable: true,
      filter: "agTextColumnFilter",
      cellClass: "cursor-text select-text",
      cellRenderer: (params) => {
        // Take the last dot-separated segment so file names that contain
        // dots still resolve to the real extension (matches the other file
        // components, which use `.pop()`).
        const type = params.data.path.split(".").pop()?.toLowerCase();
        return (
          <div className="flex items-center gap-2 font-medium">
            {params.data.progress !== undefined &&
            params.data.progress !== -1 ? (
              <div className="flex h-6 items-center justify-center text-xs font-semibold text-muted-foreground">
                {Math.round(params.data.progress * 100)}%
              </div>
            ) : (
              <ForwardedIconComponent
                name={FILE_ICONS[type]?.icon ?? "File"}
                className={cn(
                  "h-6 w-6 shrink-0",
                  params.data.progress !== undefined
                    ? "text-placeholder-foreground"
                    : (FILE_ICONS[type]?.color ?? undefined),
                )}
              />
            )}
            <div
              className={cn(
                "flex cursor-text items-center gap-2 text-sm font-medium",
                params.data.progress !== undefined &&
                  params.data.progress === -1 &&
                  "pointer-events-none text-placeholder-foreground",
              )}
            >
              {params.value}.{type}
            </div>
            {params.data.progress !== undefined &&
            params.data.progress === -1 ? (
              <span className="text-[13px] text-primary">
                Upload failed,{" "}
                <span
                  className="cursor-pointer text-accent-pink-foreground underline"
                  onClick={(e) => {
                    e.stopPropagation();
                    if (params.data.file) {
                      uploadFileDirect({ file: params.data.file });
                    }
                  }}
                >
                  try again?
                </span>
              </span>
            ) : (
              <></>
            )}
          </div>
        );
      },
    },
    {
      headerName: "Type",
      field: "path",
      flex: 1,
      filter: "agTextColumnFilter",
      editable: false,
      valueFormatter: (params) => {
        // Last segment, consistent with the Name column's extension logic.
        return params.value.split(".").pop()?.toUpperCase();
      },
      cellClass: "text-muted-foreground cursor-text select-text",
    },
    {
      headerName: "Size",
      field: "size",
      flex: 1,
      valueFormatter: (params) => {
        return formatFileSize(params.value);
      },
      editable: false,
      cellClass: "text-muted-foreground cursor-text select-text",
    },
    {
      headerName: "Modified",
      field: "updated_at",
      valueFormatter: (params) => {
        // Blank while uploading. Appending "Z" assumes the backend sends
        // naive UTC timestamps — NOTE(review): confirm against the API.
        return params.data.progress
          ? ""
          : new Date(params.value + "Z").toLocaleString();
      },
      editable: false,
      flex: 1,
      resizable: false,
      cellClass: "text-muted-foreground cursor-text select-text",
    },
    {
      maxWidth: 60,
      editable: false,
      resizable: false,
      cellClass: "cursor-default",
      cellRenderer: (params) => {
        return (
          <div className="flex h-full cursor-default items-center justify-center">
            {!params.data.progress && (
              <FilesContextMenuComponent
                file={params.data}
                handleRename={handleOpenRename}
              >
                <Button variant="ghost" size="iconMd">
                  <ForwardedIconComponent name="EllipsisVertical" />
                </Button>
              </FilesContextMenuComponent>
            )}
          </div>
        );
      },
    },
  ];

  // Upload any files dropped onto the table area.
  const onFileDrop = async (e: React.DragEvent) => {
    // Fixed: was `e.preventDefault;` (a no-op property access, not a call).
    e.preventDefault();
    e.stopPropagation();
    const droppedFiles = Array.from(e.dataTransfer.files);
    if (droppedFiles.length > 0) {
      await handleUpload(droppedFiles);
    }
  };

  const UploadButtonComponent = useMemo(() => {
    return (
      <ShadTooltip content="Upload File" side="bottom">
        <Button
          className="!px-3 md:!px-4 md:!pl-3.5"
          onClick={async () => {
            await handleUpload();
          }}
          id="upload-file-btn"
          data-testid="upload-file-btn"
        >
          <ForwardedIconComponent
            name="Plus"
            aria-hidden="true"
            className="h-4 w-4"
          />
          <span className="hidden whitespace-nowrap font-semibold md:inline">
            Upload
          </span>
        </Button>
      </ShadTooltip>
    );
  }, [uploadFile]);

  const [quickFilterText, setQuickFilterText] = useState("");

  return (
    <div
      className="flex h-full w-full flex-col overflow-y-auto"
      data-testid="cards-wrapper"
    >
      <div className="flex h-full w-full flex-col xl:container">
        <div className="flex flex-1 flex-col justify-start px-5 pt-10">
          <div className="flex h-full flex-col justify-start">
            <div
              className="flex items-center pb-8 text-xl font-semibold"
              data-testid="mainpage_title"
            >
              <div className="h-7 w-10 transition-all group-data-[open=true]/sidebar-wrapper:md:w-0 lg:hidden">
                <div className="relative left-0 opacity-100 transition-all group-data-[open=true]/sidebar-wrapper:md:opacity-0">
                  <SidebarTrigger>
                    <ForwardedIconComponent
                      name="PanelLeftOpen"
                      aria-hidden="true"
                      className=""
                    />
                  </SidebarTrigger>
                </div>
              </div>
              My Files
            </div>
            {files && files.length !== 0 ? (
              <div className="flex justify-between">
                <div className="flex w-full xl:w-5/12">
                  <Input
                    icon="Search"
                    data-testid="search-store-input"
                    type="text"
                    placeholder={`Search files...`}
                    className="mr-2 w-full"
                    value={quickFilterText || ""}
                    onChange={(event) => {
                      setQuickFilterText(event.target.value);
                    }}
                  />
                </div>
                <div className="flex items-center gap-2">
                  {UploadButtonComponent}
                  {/* <ImportButtonComponent /> */}
                </div>
              </div>
            ) : (
              <></>
            )}
            <div className="flex h-full flex-col py-4">
              {!files || !Array.isArray(files) ? (
                <div className="flex h-full w-full items-center justify-center">
                  <Loading />
                </div>
              ) : files.length > 0 ? (
                <DragWrapComponent onFileDrop={onFileDrop}>
                  <TableComponent
                    rowHeight={45}
                    headerHeight={45}
                    cellSelection={false}
                    tableOptions={{
                      hide_options: true,
                    }}
                    editable={[
                      {
                        field: "name",
                        onUpdate: handleRename,
                        editableCell: true,
                      },
                    ]}
                    enableCellTextSelection={false}
                    columnDefs={colDefs}
                    // toSorted: do not mutate the react-query cache array
                    // in place during render.
                    rowData={files.toSorted((a, b) => {
                      return sortByDate(
                        a.updated_at ?? a.created_at,
                        b.updated_at ?? b.created_at,
                      );
                    })}
                    className="ag-no-border w-full"
                    pagination
                    ref={tableRef}
                    quickFilterText={quickFilterText}
                    gridOptions={{
                      enableCellTextSelection: true,
                      stopEditingWhenCellsLoseFocus: true,
                      ensureDomOrder: true,
                      colResizeDefault: "shift",
                    }}
                  />
                </DragWrapComponent>
              ) : (
                <CardsWrapComponent
                  onFileDrop={onFileDrop}
                  dragMessage="Drop files to upload"
                >
                  <div className="flex h-full w-full flex-col items-center justify-center gap-8 pb-8">
                    <div className="flex flex-col items-center gap-2">
                      <h3 className="text-2xl font-semibold">No files</h3>
                      <p className="text-lg text-secondary-foreground">
                        Upload files or import from your preferred cloud.
                      </p>
                    </div>
                    <div className="flex items-center gap-2">
                      {UploadButtonComponent}
                      {/* <ImportButtonComponent /> */}
                    </div>
                  </div>
                </CardsWrapComponent>
              )}
            </div>
          </div>
        </div>
      </div>
    </div>
  );
};

export default FilesPage;

View file

@ -26,7 +26,7 @@ const HomePage = ({ type }) => {
const [pageIndex, setPageIndex] = useState(1);
const [pageSize, setPageSize] = useState(12);
const [search, setSearch] = useState("");
const handleFileDrop = useFileDrop("flows");
const [flowType, setFlowType] = useState<"flows" | "components">(type);
const myCollectionId = useFolderStore((state) => state.myCollectionId);
const folders = useFolderStore((state) => state.folders);
@ -77,10 +77,25 @@ const HomePage = ({ type }) => {
flows?.find((flow) => flow.folder_id === (folderId ?? myCollectionId)) ===
undefined;
const handleFileDrop = useFileDrop(isEmptyFolder ? undefined : flowType);
useEffect(() => {
if (
!isEmptyFolder &&
flows?.find(
(flow) =>
flow.folder_id === (folderId ?? myCollectionId) &&
flow.is_component === (flowType === "components"),
) === undefined
) {
setFlowType(flowType === "flows" ? "components" : "flows");
}
}, [isEmptyFolder]);
return (
<CardsWrapComponent
onFileDrop={handleFileDrop}
dragMessage={`Drag your ${folderName} here`}
dragMessage={`Drop your ${isEmptyFolder ? "flows or components" : flowType} here`}
>
<div
className="flex h-full w-full flex-col overflow-y-auto"

View file

@ -1,4 +1,3 @@
import CardsWrapComponent from "@/components/core/cardsWrapComponent";
import SideBarFoldersButtonsComponent from "@/components/core/folderSidebarComponent/components/sideBarFolderButtons";
import { SidebarProvider } from "@/components/ui/sidebar";
import { useDeleteFolders } from "@/controllers/API/queries/folders";
@ -11,7 +10,6 @@ import { useQueryClient } from "@tanstack/react-query";
import { useEffect, useState } from "react";
import { Outlet } from "react-router-dom";
import ModalsComponent from "../components/modalsComponent";
import useFileDrop from "../hooks/use-on-file-drop";
import EmptyPage from "./emptyPage";
export default function CollectionPage(): JSX.Element {
@ -21,7 +19,6 @@ export default function CollectionPage(): JSX.Element {
const navigate = useCustomNavigate();
const flows = useFlowsManagerStore((state) => state.flows);
const examples = useFlowsManagerStore((state) => state.examples);
const handleFileDrop = useFileDrop("flow");
const setSuccessData = useAlertStore((state) => state.setSuccessData);
const setErrorData = useAlertStore((state) => state.setErrorData);
const folderToEdit = useFolderStore((state) => state.folderToEdit);
@ -70,6 +67,9 @@ export default function CollectionPage(): JSX.Element {
setFolderToEdit(item);
setOpenDeleteFolderModal(true);
}}
handleFilesClick={() => {
navigate("files");
}}
/>
)}
<main className="flex h-full w-full overflow-hidden">
@ -77,16 +77,11 @@ export default function CollectionPage(): JSX.Element {
<div
className={`relative mx-auto flex h-full w-full flex-col overflow-hidden`}
>
<CardsWrapComponent
onFileDrop={handleFileDrop}
dragMessage={`Drop your file(s) here`}
>
{flows?.length !== examples?.length || folders?.length > 1 ? (
<Outlet />
) : (
<EmptyPage setOpenModal={setOpenModal} />
)}
</CardsWrapComponent>
{flows?.length !== examples?.length || folders?.length > 1 ? (
<Outlet />
) : (
<EmptyPage setOpenModal={setOpenModal} />
)}
</div>
) : (
<div className="flex h-full w-full items-center justify-center">

View file

@ -1,19 +1,11 @@
export const sortFlows = (flows, type) => {
const isComponent = type === "component";
const sortByDate = (a, b) => {
const sortByDateFn = (a, b) => {
const dateA = a?.updated_at || a?.date_created;
const dateB = b?.updated_at || b?.date_created;
if (dateA && dateB) {
return new Date(dateB).getTime() - new Date(dateA).getTime();
} else if (dateA) {
return 1;
} else if (dateB) {
return -1;
} else {
return 0;
}
return sortByDate(dateA, dateB);
};
const filteredFlows =
@ -21,5 +13,29 @@ export const sortFlows = (flows, type) => {
? flows
: flows?.filter((f) => (f?.is_component ?? false) === isComponent);
return filteredFlows?.sort(sortByDate) ?? [];
return filteredFlows?.sort(sortByDateFn) ?? [];
};
// Comparator over date strings, newest first. When only one side has a
// date, the dated entry sorts after the undated one (a-only -> 1,
// b-only -> -1); two missing dates compare equal.
export const sortByDate = (dateA: string, dateB: string) => {
  const hasA = Boolean(dateA);
  const hasB = Boolean(dateB);
  if (hasA && hasB) {
    return new Date(dateB).getTime() - new Date(dateA).getTime();
  }
  return hasA ? 1 : hasB ? -1 : 0;
};
// Comparator that orders `true` before `false`: returns -1 when only `a`
// is true, 1 when only `b` is true, and 0 when both sides are equal.
export const sortByBoolean = (a: boolean, b: boolean) =>
  Number(b) - Number(a);

View file

@ -13,7 +13,10 @@ import { StoreGuard } from "./components/authorization/storeGuard";
import ContextWrapper from "./contexts";
import { CustomNavigate } from "./customization/components/custom-navigate";
import { BASENAME } from "./customization/config-constants";
import { ENABLE_CUSTOM_PARAM } from "./customization/feature-flags";
import {
ENABLE_CUSTOM_PARAM,
ENABLE_FILE_MANAGEMENT,
} from "./customization/feature-flags";
import { AppAuthenticatedPage } from "./pages/AppAuthenticatedPage";
import { AppInitPage } from "./pages/AppInitPage";
import { AppWrapperPage } from "./pages/AppWrapperPage";
@ -21,6 +24,7 @@ import { DashboardWrapperPage } from "./pages/DashboardWrapperPage";
import FlowPage from "./pages/FlowPage";
import LoginPage from "./pages/LoginPage";
import CollectionPage from "./pages/MainPage/pages";
import FilesPage from "./pages/MainPage/pages/filesPage";
import HomePage from "./pages/MainPage/pages/homePage";
import SettingsPage from "./pages/SettingsPage";
import ApiKeysPage from "./pages/SettingsPage/pages/ApiKeysPage";
@ -76,6 +80,9 @@ const router = createBrowserRouter(
index
element={<CustomNavigate replace to={"flows"} />}
/>
{ENABLE_FILE_MANAGEMENT && (
<Route path="files" element={<FilesPage />} />
)}
<Route
path="flows/"
element={<HomePage key="flows" type="flows" />}

View file

@ -1,17 +1,14 @@
import { INVALID_FILE_SIZE_ALERT } from "@/constants/alerts_constants";
import { useUtilityStore } from "@/stores/utilityStore";
const useFileSizeValidator = (
setErrorData: (newState: { title: string; list?: Array<string> }) => void,
) => {
import { formatFileSize } from "@/utils/stringManipulation";
const useFileSizeValidator = () => {
const maxFileSizeUpload = useUtilityStore((state) => state.maxFileSizeUpload);
const validateFileSize = (file) => {
if (file.size > maxFileSizeUpload) {
setErrorData({
title: INVALID_FILE_SIZE_ALERT(maxFileSizeUpload / 1024 / 1024),
});
return false;
throw new Error(
INVALID_FILE_SIZE_ALERT(formatFileSize(maxFileSizeUpload)),
);
}
return true;
};

View file

@ -12,7 +12,7 @@
--ag-selected-row-background-color: hsl(var(--accent)) !important;
--ag-menu-background-color: hsl(var(--accent)) !important;
--ag-panel-background-color: hsl(var(--accent)) !important;
--ag-row-hover-color: hsl(var(--primary-foreground)) !important;
--ag-row-hover-color: hsl(var(--accent)) !important;
--ag-header-height: 2.5rem !important;
}
@ -78,3 +78,24 @@
.ag-row {
cursor: pointer;
}
/* "ag-no-border" variant: strips AG Grid's default chrome (outer border,
   row separators, paging-panel divider) so the grid blends into the
   surrounding card — presumably used by the new Files table; confirm usage. */
.ag-no-border .ag-root-wrapper {
border: none !important;
}
/* Remove the horizontal line between rows. */
.ag-no-border .ag-row {
border-bottom: none !important;
}
/* Add breathing room below the header since the border no longer separates it. */
.ag-no-border .ag-header {
margin-bottom: 0.6rem !important;
}
.ag-no-border .ag-paging-panel {
border-top: none !important;
}
/* Suppress the focus ring on cells (transparent border keeps layout stable). */
.ag-no-border .ag-cell-focus:not(.ag-cell-inline-editing) {
border: 1px solid transparent !important;
box-shadow: none !important;
outline: none !important;
}

View file

@ -90,6 +90,7 @@ export type InputFieldType = {
[key: string]: any;
icon?: string;
text?: string;
temp_file?: boolean;
};
export type OutputFieldProxyType = {

View file

@ -0,0 +1,13 @@
// Shape of a managed file as handled by the file-management feature.
export type FileType = {
id: string; // server-assigned unique identifier
user_id: string; // id of the owning user
provider: string; // import source — presumably local upload or a cloud provider (Drive/Dropbox/OneDrive); verify against backend
name: string; // display name shown in the files table
updated_at?: string; // last-modified timestamp; absent when never updated
path: string; // storage path; per the PR notes, also used as the selection key in the UI
created_at: string; // creation timestamp
size: number; // file size — assumed to be bytes; TODO confirm unit with the API
progress?: number; // upload progress, present only while an upload is in flight
file?: File; // in-memory browser File, present only for not-yet-uploaded entries
type?: string; // file type hint — presumably extension/MIME derived from the name; verify
};

View file

@ -137,6 +137,16 @@ export const getStatusColor = (status: string): string => {
return "";
};
/**
 * Formats a byte count as a human-readable string using binary
 * (1024-based) units, e.g. 1536 -> "1.5 KB".
 *
 * @param bytes - byte count (expected non-negative)
 * @returns formatted size; "0 Bytes" for zero (label kept as-is for
 *          backward compatibility with existing callers)
 */
export const formatFileSize = (bytes: number): string => {
  if (bytes === 0) return "0 Bytes";
  const k = 1024;
  const sizes = ["B", "KB", "MB", "GB", "TB"];
  // Clamp the unit index: without it, values >= 1024^5 index past the table
  // and render as "1 undefined", and fractional bytes < 1 index at -1.
  const i = Math.min(
    Math.max(Math.floor(Math.log(bytes) / Math.log(k)), 0),
    sizes.length - 1,
  );
  return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
};
export const convertStringToHTML = (htmlString: string): JSX.Element => {
return React.createElement("span", {
dangerouslySetInnerHTML: { __html: sanitizeHTML(htmlString) },

Some files were not shown because too many files have changed in this diff Show more