diff --git a/README.md b/README.md
index 11d7c42aa..b07ec482d 100644
--- a/README.md
+++ b/README.md
@@ -31,16 +31,17 @@
- [Table of Contents](#table-of-contents)
- [📦 Installation](#-installation)
- [Locally](#locally)
+ - [HuggingFace Spaces](#huggingface-spaces)
- [🖥️ Command Line Interface (CLI)](#️-command-line-interface-cli)
- [Usage](#usage)
- [Environment Variables](#environment-variables)
- [Deployment](#deployment)
- - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
- - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
+ - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
+ - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
- [API Usage](#api-usage)
- - [🎨 Creating Flows](#-creating-flows)
- - [👋 Contributing](#-contributing)
- - [📄 License](#-license)
+- [🎨 Creating Flows](#-creating-flows)
+- [👋 Contributing](#-contributing)
+- [📄 License](#-license)
# 📦 Installation
@@ -61,6 +62,8 @@ or
langflow # or langflow --help
```
+### HuggingFace Spaces
+You can also check it out on [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow) and run it in your browser! You can even clone it and have your own copy of Langflow to play with.
# 🖥️ Command Line Interface (CLI)
@@ -103,7 +106,7 @@ A sample `.env` file named `.env.example` is included with the project. Copy thi
# Deployment
-### Deploy Langflow on Google Cloud Platform
+## Deploy Langflow on Google Cloud Platform
Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.
@@ -112,7 +115,7 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
-### Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve)
+## Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve)
Langflow integrates with langchain-serve to provide a one-command deployment to Jina AI Cloud.
@@ -219,8 +222,15 @@ print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
> Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository.
+## Deploy on Railway
+[](https://railway.app/template/Emy2sU?referralCode=MnPSdg)
-## 🎨 Creating Flows
+## Deploy on Render
+
+
+
+
+# 🎨 Creating Flows
Creating flows with Langflow is easy. Simply drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://langchain.readthedocs.io/en/latest/reference.html) to choose from, including LLMs, prompt serializers, agents, and chains.
@@ -239,7 +249,7 @@ flow("Hey, have you heard of Langflow?")
```
-## 👋 Contributing
+# 👋 Contributing
We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible.
@@ -252,6 +262,6 @@ Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask question
[](https://star-history.com/#logspace-ai/langflow&Date)
-## 📄 License
+# 📄 License
Langflow is released under the MIT License. See the LICENSE file for details.
diff --git a/poetry.lock b/poetry.lock
index cac5bb037..c8e43837c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -739,13 +739,13 @@ sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
[[package]]
name = "cohere"
-version = "4.12.1"
+version = "4.13.0"
description = ""
optional = false
python-versions = ">=3.7,<4.0"
files = [
- {file = "cohere-4.12.1-py3-none-any.whl", hash = "sha256:80d17ae928873cdf63883a338618e477de5c71b3d510d7891af7dfdabc25186e"},
- {file = "cohere-4.12.1.tar.gz", hash = "sha256:2e93a094757576d6c8d42e76363aa7841eb4166c5b0de8e5ed7272783982d2a4"},
+ {file = "cohere-4.13.0-py3-none-any.whl", hash = "sha256:ac8faf352c9e8794dfd05002ce52b7d4f6da8f47a20172c8640ed58e8dfd3f10"},
+ {file = "cohere-4.13.0.tar.gz", hash = "sha256:7c8e65aa4fc50fe6a9e8fe19d64e18b77d4250ec70845d480c68644d5a903253"},
]
[package.dependencies]
@@ -1332,22 +1332,22 @@ importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""}
[[package]]
name = "fastapi"
-version = "0.99.1"
+version = "0.100.0"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.7"
files = [
- {file = "fastapi-0.99.1-py3-none-any.whl", hash = "sha256:976df7bab51ac7beda9f68c4513b8c4490b5c1135c72aafd0a5ee4023ec5282e"},
- {file = "fastapi-0.99.1.tar.gz", hash = "sha256:ac78f717cd80d657bd183f94d33b9bda84aa376a46a9dab513586b8eef1dc6fc"},
+ {file = "fastapi-0.100.0-py3-none-any.whl", hash = "sha256:271662daf986da8fa98dc2b7c7f61c4abdfdccfb4786d79ed8b2878f172c6d5f"},
+ {file = "fastapi-0.100.0.tar.gz", hash = "sha256:acb5f941ea8215663283c10018323ba7ea737c571b67fc7e88e9469c7eb1d12e"},
]
[package.dependencies]
-pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0"
+pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<3.0.0"
starlette = ">=0.27.0,<0.28.0"
typing-extensions = ">=4.5.0"
[package.extras]
-all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
+all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
[[package]]
name = "filelock"
@@ -2826,20 +2826,20 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)",
[[package]]
name = "langchain"
-version = "0.0.219"
+version = "0.0.229"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
- {file = "langchain-0.0.219-py3-none-any.whl", hash = "sha256:1f08a00e622f1c75087d6013f34e82be3f8dd1859266eb583a0fd7bc045090cf"},
- {file = "langchain-0.0.219.tar.gz", hash = "sha256:842f8212939e5ac4005906d2215574ffb3e34d2fe28f5bc0f46eb3b28fb29c5d"},
+ {file = "langchain-0.0.229-py3-none-any.whl", hash = "sha256:a7ca79e4ab892756ede95d212bd42243303f91b172535cefd02b0b8965e4e7b7"},
+ {file = "langchain-0.0.229.tar.gz", hash = "sha256:ab1beac7f3fc1f06ab1a0b545ef0d47a3d5efef3b2b4c646aafaefc2eb3151d3"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
dataclasses-json = ">=0.5.7,<0.6.0"
-langchainplus-sdk = ">=0.0.17"
+langchainplus-sdk = ">=0.0.20,<0.0.21"
numexpr = ">=2.8.4,<3.0.0"
numpy = ">=1,<2"
openapi-schema-pydantic = ">=1.2,<2.0"
@@ -2850,27 +2850,27 @@ SQLAlchemy = ">=1.4,<3"
tenacity = ">=8.1.0,<9.0.0"
[package.extras]
-all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.3,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (==9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.6.2,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.1.dev3,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "octoai-sdk (>=0.1.1,<0.2.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", 
"tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
+all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3,<0.4)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.3,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.6.2,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.1.dev3,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=0.11.0,<0.12.0)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "octoai-sdk (>=0.1.1,<0.2.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text 
(>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0a20230509004)", "openai (>=0,<1)"]
-clarifai = ["clarifai (==9.1.0)"]
+clarifai = ["clarifai (>=9.1.0)"]
cohere = ["cohere (>=3,<4)"]
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
embeddings = ["sentence-transformers (>=2,<3)"]
-extended-testing = ["atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "zep-python (>=0.31)"]
+extended-testing = ["atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "zep-python (>=0.32)"]
javascript = ["esprima (>=4.0.1,<5.0.0)"]
-llms = ["anthropic (>=0.2.6,<0.3.0)", "clarifai (==9.1.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.6)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
+llms = ["anthropic (>=0.3,<0.4)", "clarifai (>=9.1.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.19)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"]
qdrant = ["qdrant-client (>=1.1.2,<2.0.0)"]
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
[[package]]
name = "langchain-serve"
-version = "0.0.54"
+version = "0.0.56"
description = "Langchain Serve - serve your langchain apps on Jina AI Cloud."
optional = true
python-versions = "*"
files = [
- {file = "langchain-serve-0.0.54.tar.gz", hash = "sha256:5cbc980886c81f3bac7ed3337adeb0b94fc9f3645e4501dd7f0702f90766bbaa"},
+ {file = "langchain-serve-0.0.56.tar.gz", hash = "sha256:47c1e0290aec07c7e6366bb1b6b866d7d3f5446fb296160ca1bf2ef522c33fac"},
]
[package.dependencies]
@@ -7552,4 +7552,4 @@ deploy = ["langchain-serve"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.11"
-content-hash = "e25e43fde8f96f57beab702ac4c51cb3e569b81f85c540a7b4b5fb7b6388d04e"
+content-hash = "3ef18bc73e595f6aa8c3ee4b4c9666f3328c601933aef1bf225b865f39504e3c"
diff --git a/pyproject.toml b/pyproject.toml
index 08bdaad65..c82e532f8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,14 +23,14 @@ langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
python = ">=3.9,<3.11"
-fastapi = "^0.99.0"
+fastapi = "^0.100.0"
uvicorn = "^0.22.0"
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^20.1.0"
-langchain = "^0.0.219"
+langchain = "^0.0.229"
openai = "^0.27.8"
pandas = "^2.0.0"
chromadb = "^0.3.21"
@@ -78,7 +78,7 @@ black = "^23.1.0"
ipykernel = "^6.21.2"
mypy = "^1.1.1"
ruff = "^0.0.254"
-httpx = "^0.23.3"
+httpx = "*"
pytest = "^7.2.2"
types-requests = "^2.28.11"
requests = "^2.28.0"
diff --git a/render.yaml b/render.yaml
new file mode 100644
index 000000000..e67da9334
--- /dev/null
+++ b/render.yaml
@@ -0,0 +1,11 @@
+services:
+ # A Docker web service
+ - type: web
+ name: langflow
+ runtime: docker
+ plan: free
+ dockerfilePath: ./Dockerfile
+ repo: https://github.com/logspace-ai/langflow
+ branch: main
+ healthCheckPath: /health
+ autoDeploy: false
diff --git a/src/backend/langflow/api/v1/base.py b/src/backend/langflow/api/v1/base.py
index 420e1645f..71cac5412 100644
--- a/src/backend/langflow/api/v1/base.py
+++ b/src/backend/langflow/api/v1/base.py
@@ -60,12 +60,26 @@ INVALID_CHARACTERS = {
"}",
}
+INVALID_NAMES = {
+ "input_variables",
+ "output_parser",
+ "partial_variables",
+ "template",
+ "template_format",
+ "validate_template",
+}
+
def validate_prompt(template: str):
input_variables = extract_input_variables_from_prompt(template)
# Check if there are invalid characters in the input_variables
input_variables = check_input_variables(input_variables)
+ if any(var in INVALID_NAMES for var in input_variables):
+        raise ValueError(
+            f"Invalid input variables. None of the variables can be named {', '.join(INVALID_NAMES)}."
+        )
+
try:
PromptTemplate(template=template, input_variables=input_variables)
except Exception as exc:
diff --git a/src/backend/langflow/api/v1/callback.py b/src/backend/langflow/api/v1/callback.py
index 03f76543e..deddde47f 100644
--- a/src/backend/langflow/api/v1/callback.py
+++ b/src/backend/langflow/api/v1/callback.py
@@ -10,6 +10,7 @@ from fastapi import WebSocket
from langchain.schema import AgentAction, LLMResult, AgentFinish
+from langflow.utils.logger import logger
# https://github.com/hwchase17/chat-langchain/blob/master/callback.py
@@ -62,12 +63,36 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
async def on_tool_end(self, output: str, **kwargs: Any) -> Any:
"""Run when tool ends running."""
+ observation_prefix = kwargs.get("observation_prefix", "Tool output: ")
+ split_output = output.split()
+ first_word = split_output[0]
+ rest_of_output = split_output[1:]
+ # Create a formatted message.
+ intermediate_steps = f"{observation_prefix}{first_word}"
+
+ # Create a ChatResponse instance.
resp = ChatResponse(
message="",
type="stream",
- intermediate_steps=f"Tool output: {output}",
+ intermediate_steps=intermediate_steps,
)
- await self.websocket.send_json(resp.dict())
+ rest_of_resps = [
+ ChatResponse(
+ message="",
+ type="stream",
+ intermediate_steps=f"{word}",
+ )
+ for word in rest_of_output
+ ]
+ resps = [resp] + rest_of_resps
+ # Try to send the response, handle potential errors.
+
+ try:
+ # This is to emulate the stream of tokens
+ for resp in resps:
+ await self.websocket.send_json(resp.dict())
+ except Exception as e:
+ logger.error(e)
async def on_tool_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py
index 164dd2dd5..937eb2cf6 100644
--- a/src/backend/langflow/api/v1/chat.py
+++ b/src/backend/langflow/api/v1/chat.py
@@ -115,13 +115,6 @@ async def stream_build(flow_id: str):
number_of_nodes = len(graph.nodes)
flow_data_store[flow_id]["status"] = BuildStatus.IN_PROGRESS
- # To deal with the ZeroShotAgent case
- # we need to build the root node first
- # and then the rest of the graph
- # This is a big problem because certain nodes require
- # params that are not connected to it.
- # We should consider connecting the tools to the ZeroShotPrompt
- graph.build()
for i, vertex in enumerate(graph.generator_build(), 1):
try:
@@ -133,7 +126,7 @@ async def stream_build(flow_id: str):
params = vertex._built_object_repr()
valid = True
logger.debug(
- f"Building node {params[:50]}{'...' if len(params) > 50 else ''}"
+ f"Building node {str(params)[:50]}{'...' if len(str(params)) > 50 else ''}"
)
if vertex.artifacts:
# The artifacts will be prompt variables
diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py
index 856ee226e..33de784b5 100644
--- a/src/backend/langflow/chat/manager.py
+++ b/src/backend/langflow/chat/manager.py
@@ -104,8 +104,14 @@ class ChatManager:
async def close_connection(self, client_id: str, code: int, reason: str):
if websocket := self.active_connections[client_id]:
- await websocket.close(code=code, reason=reason)
- self.disconnect(client_id)
+ try:
+ await websocket.close(code=code, reason=reason)
+ self.disconnect(client_id)
+ except RuntimeError as exc:
+ # This is to catch the following error:
+ # Unexpected ASGI message 'websocket.close', after sending 'websocket.close'
+ if "after sending" in str(exc):
+ logger.error(exc)
async def process_message(
self, client_id: str, payload: Dict, langchain_object: Any
diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml
index dfe16531c..8934ff92f 100644
--- a/src/backend/langflow/config.yaml
+++ b/src/backend/langflow/config.yaml
@@ -277,9 +277,14 @@ vectorstores:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/supabase"
MongoDBAtlasVectorSearch:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas"
+ # Requires docarray >=0.32.0 but langchain-serve requires jina 3.15.2 which doesn't support docarray >=0.32.0
+ # DocArrayInMemorySearch:
+ # documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/docarray_in_memory"
wrappers:
RequestsWrapper:
documentation: ""
+ SQLDatabase:
+ documentation: ""
output_parsers:
StructuredOutputParser:
documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured"
diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py
index 65fff3239..86a2f98a9 100644
--- a/src/backend/langflow/graph/graph/base.py
+++ b/src/backend/langflow/graph/graph/base.py
@@ -214,3 +214,10 @@ class Graph:
if node_type in node_types:
children.append(node)
return children
+
+ def __repr__(self):
+ node_ids = [node.id for node in self.nodes]
+ edges_repr = "\n".join(
+ [f"{edge.source.id} --> {edge.target.id}" for edge in self.edges]
+ )
+ return f"Graph:\nNodes: {node_ids}\nConnections:\n{edges_repr}"
diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py
index c7aef883a..4eb39e8e8 100644
--- a/src/backend/langflow/graph/vertex/base.py
+++ b/src/backend/langflow/graph/vertex/base.py
@@ -123,89 +123,119 @@ class Vertex:
self.params = params
def _build(self):
- # The params dict is used to build the module
- # it contains values and keys that point to nodes which
- # have their own params dict
- # When build is called, we iterate through the params dict
- # and if the value is a node, we call build on that node
- # and use the output of that build as the value for the param
- # if the value is not a node, then we use the value as the param
- # and continue
- # Another aspect is that the node_type is the class that we need to import
- # and instantiate with these built params
+ """
+ Initiate the build process.
+ """
logger.debug(f"Building {self.vertex_type}")
- # Build each node in the params dict
+ self._build_each_node_in_params_dict()
+ self._get_and_instantiate_class()
+ self._validate_built_object()
+
+ self._built = True
+
+ def _build_each_node_in_params_dict(self):
+ """
+ Iterates over each node in the params dictionary and builds it.
+ """
for key, value in self.params.copy().items():
- # Check if Node or list of Nodes and not self
- # to avoid recursion
- if isinstance(value, Vertex):
+ if self._is_node(value):
if value == self:
del self.params[key]
continue
- result = value.build()
- # If the key is "func", then we need to use the run method
- if key == "func":
- if not isinstance(result, types.FunctionType):
- # func can be
- # PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
- # so we need to check if there is an attribute called run
- if hasattr(result, "run"):
- result = result.run # type: ignore
- elif hasattr(result, "get_function"):
- result = result.get_function() # type: ignore
- elif inspect.iscoroutinefunction(result):
- self.params["coroutine"] = result
- else:
- # turn result which is a function into a coroutine
- # so that it can be awaited
- self.params["coroutine"] = sync_to_async(result)
- if isinstance(result, list):
- # If the result is a list, then we need to extend the list
- # with the result but first check if the key exists
- # if it doesn't, then we need to create a new list
- if isinstance(self.params[key], list):
- self.params[key].extend(result)
+ self._build_node_and_update_params(key, value)
+ elif isinstance(value, list) and self._is_list_of_nodes(value):
+ self._build_list_of_nodes_and_update_params(key, value)
- self.params[key] = result
- elif isinstance(value, list) and all(
- isinstance(node, Vertex) for node in value
- ):
- self.params[key] = []
- for node in value:
- built = node.build()
- if isinstance(built, list):
- self.params[key].extend(built)
- else:
- self.params[key].append(built)
+ def _is_node(self, value):
+ """
+ Checks if the provided value is an instance of Vertex.
+ """
+ return isinstance(value, Vertex)
- # Get the class from LANGCHAIN_TYPES_DICT
- # and instantiate it with the params
- # and return the instance
+ def _is_list_of_nodes(self, value):
+ """
+ Checks if the provided value is a list of Vertex instances.
+ """
+ return all(self._is_node(node) for node in value)
+ def _build_node_and_update_params(self, key, node):
+ """
+ Builds a given node and updates the params dictionary accordingly.
+ """
+ result = node.build()
+ self._handle_func(key, result)
+ if isinstance(result, list):
+ self._extend_params_list_with_result(key, result)
+ self.params[key] = result
+
+ def _build_list_of_nodes_and_update_params(self, key, nodes):
+ """
+ Iterates over a list of nodes, builds each and updates the params dictionary.
+ """
+ self.params[key] = []
+ for node in nodes:
+ built = node.build()
+ if isinstance(built, list):
+ self.params[key].extend(built)
+ else:
+ self.params[key].append(built)
+
+ def _handle_func(self, key, result):
+ """
+ Handles 'func' key by checking if the result is a function and setting it as coroutine.
+ """
+ if key == "func":
+ if not isinstance(result, types.FunctionType):
+ if hasattr(result, "run"):
+ result = result.run # type: ignore
+ elif hasattr(result, "get_function"):
+ result = result.get_function() # type: ignore
+ elif inspect.iscoroutinefunction(result):
+ self.params["coroutine"] = result
+ else:
+ self.params["coroutine"] = sync_to_async(result)
+
+ def _extend_params_list_with_result(self, key, result):
+ """
+ Extends a list in the params dictionary with the given result if it exists.
+ """
+ if isinstance(self.params[key], list):
+ self.params[key].extend(result)
+
+ def _get_and_instantiate_class(self):
+ """
+ Gets the class from a dictionary and instantiates it with the params.
+ """
+ if self.base_type is None:
+ raise ValueError(f"Base type for node {self.vertex_type} not found")
try:
- if self.base_type is None:
- raise ValueError(f"Base type for node {self.vertex_type} not found")
result = loading.instantiate_class(
node_type=self.vertex_type,
base_type=self.base_type,
params=self.params,
)
- # Result could be the _built_object or
- # (_built_object, dict) tuple
- if isinstance(result, tuple):
- self._built_object, self.artifacts = result
- else:
- self._built_object = result
+ self._update_built_object_and_artifacts(result)
except Exception as exc:
raise ValueError(
f"Error building node {self.vertex_type}: {str(exc)}"
) from exc
+ def _update_built_object_and_artifacts(self, result):
+ """
+ Updates the built object and its artifacts.
+ """
+ if isinstance(result, tuple):
+ self._built_object, self.artifacts = result
+ else:
+ self._built_object = result
+
+ def _validate_built_object(self):
+ """
+ Checks if the built object is None and raises a ValueError if so.
+ """
if self._built_object is None:
raise ValueError(f"Node type {self.vertex_type} not found")
- self._built = True
-
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py
index e6a1868db..0f09b0b20 100644
--- a/src/backend/langflow/graph/vertex/types.py
+++ b/src/backend/langflow/graph/vertex/types.py
@@ -1,3 +1,4 @@
+import ast
from typing import Any, Dict, List, Optional, Union
from langflow.graph.vertex.base import Vertex
@@ -79,7 +80,7 @@ class WrapperVertex(Vertex):
def build(self, force: bool = False) -> Any:
if not self._built or force:
if "headers" in self.params:
- self.params["headers"] = eval(self.params["headers"])
+ self.params["headers"] = ast.literal_eval(self.params["headers"])
self._build()
return self._built_object
@@ -93,7 +94,11 @@ class DocumentLoaderVertex(Vertex):
# show how many documents are in the list?
if self._built_object:
+ avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(
+ self._built_object
+ )
return f"""{self.vertex_type}({len(self._built_object)} documents)
+ \nAvg. Document Length (characters): {avg_length}
Documents: {self._built_object[:3]}..."""
return f"{self.vertex_type}()"
@@ -125,8 +130,13 @@ class TextSplitterVertex(Vertex):
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
+
if self._built_object:
+ avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(
+ self._built_object
+ )
return f"""{self.vertex_type}({len(self._built_object)} documents)
+ \nAvg. Document Length (characters): {avg_length}
\nDocuments: {self._built_object[:3]}..."""
return f"{self.vertex_type}()"
@@ -202,13 +212,28 @@ class PromptVertex(Vertex):
return self._built_object
def _built_object_repr(self):
- if self.artifacts and hasattr(self._built_object, "format"):
- # We'll build the prompt with the artifacts
- # to show the user what the prompt looks like
- # with the variables filled in
- return self._built_object.format(**self.artifacts)
- else:
+ if (
+ not self.artifacts
+ or self._built_object is None
+ or not hasattr(self._built_object, "format")
+ ):
return super()._built_object_repr()
+ # We'll build the prompt with the artifacts
+ # to show the user what the prompt looks like
+ # with the variables filled in
+ artifacts = self.artifacts.copy()
+ # Remove the handle_keys from the artifacts
+ # so the prompt format doesn't break
+ artifacts.pop("handle_keys", None)
+ try:
+ template = self._built_object.format(**artifacts)
+ return (
+ template
+ if isinstance(template, str)
+ else f"{self.vertex_type}({template})"
+ )
+ except KeyError:
+ return str(self._built_object)
class OutputParserVertex(Vertex):
diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/langflow/interface/agents/custom.py
index aa0cfb5db..b4e2b9bac 100644
--- a/src/backend/langflow/interface/agents/custom.py
+++ b/src/backend/langflow/interface/agents/custom.py
@@ -157,7 +157,7 @@ class VectorStoreAgent(CustomAgentExecutor):
llm_chain=llm_chain, allowed_tools=tool_names, **kwargs # type: ignore
)
return AgentExecutor.from_agent_and_tools(
- agent=agent, tools=tools, verbose=True
+ agent=agent, tools=tools, verbose=True, handle_parsing_errors=True
)
def run(self, *args, **kwargs):
@@ -232,6 +232,7 @@ class SQLAgent(CustomAgentExecutor):
verbose=True,
max_iterations=15,
early_stopping_method="force",
+ handle_parsing_errors=True,
)
def run(self, *args, **kwargs):
@@ -276,7 +277,7 @@ class VectorStoreRouterAgent(CustomAgentExecutor):
llm_chain=llm_chain, allowed_tools=tool_names, **kwargs # type: ignore
)
return AgentExecutor.from_agent_and_tools(
- agent=agent, tools=tools, verbose=True
+ agent=agent, tools=tools, verbose=True, handle_parsing_errors=True
)
def run(self, *args, **kwargs):
@@ -308,6 +309,7 @@ class InitializeAgent(CustomAgentExecutor):
agent=agent, # type: ignore
memory=memory,
return_intermediate_steps=True,
+ handle_parsing_errors=True,
)
def __init__(self, *args, **kwargs):
diff --git a/src/backend/langflow/interface/chains/base.py b/src/backend/langflow/interface/chains/base.py
index ff7e1ee33..67d31308f 100644
--- a/src/backend/langflow/interface/chains/base.py
+++ b/src/backend/langflow/interface/chains/base.py
@@ -23,6 +23,7 @@ class ChainCreator(LangChainTypeCreator):
from_method_nodes = {
"ConversationalRetrievalChain": "from_llm",
"LLMCheckerChain": "from_llm",
+ "SQLDatabaseChain": "from_llm",
}
@property
diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py
index bfcd18caa..04ee8aba1 100644
--- a/src/backend/langflow/interface/importing/utils.py
+++ b/src/backend/langflow/interface/importing/utils.py
@@ -11,6 +11,7 @@ from langchain.chat_models.base import BaseChatModel
from langchain.tools import BaseTool
from langflow.interface.custom.custom import CustomComponent
from langflow.utils import validate
+from langflow.interface.wrappers.base import wrapper_creator
def import_module(module_path: str) -> Any:
@@ -103,7 +104,11 @@ def import_prompt(prompt: str) -> Type[PromptTemplate]:
def import_wrapper(wrapper: str) -> Any:
"""Import wrapper from wrapper name"""
- return import_module(f"from langchain.requests import {wrapper}")
+ if (
+ isinstance(wrapper_creator.type_dict, dict)
+ and wrapper in wrapper_creator.type_dict
+ ):
+ return wrapper_creator.type_dict.get(wrapper)
def import_toolkit(toolkit: str) -> Any:
diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py
index 2ec59e3b5..9ec149ec9 100644
--- a/src/backend/langflow/interface/initialize/loading.py
+++ b/src/backend/langflow/interface/initialize/loading.py
@@ -1,3 +1,4 @@
+import contextlib
import json
from typing import Any, Callable, Dict, List, Sequence, Type
@@ -24,6 +25,7 @@ from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
+from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.utils import load_file_into_dict
from langflow.utils import validate
from langchain.chains.base import Chain
@@ -70,7 +72,11 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
elif base_type == "prompts":
return instantiate_prompt(node_type, class_object, params)
elif base_type == "tools":
- return instantiate_tool(node_type, class_object, params)
+ tool = instantiate_tool(node_type, class_object, params)
+ if hasattr(tool, "name") and isinstance(tool, BaseTool):
+ # tool name shouldn't contain spaces
+ tool.name = tool.name.replace(" ", "_")
+ return tool
elif base_type == "toolkits":
return instantiate_toolkit(node_type, class_object, params)
elif base_type == "embeddings":
@@ -95,6 +101,8 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
return instantiate_memory(node_type, class_object, params)
elif base_type == "custom_components":
return instantiate_custom_component(node_type, class_object, params)
+ elif base_type == "wrappers":
+ return instantiate_wrapper(node_type, class_object, params)
else:
return class_object(**params)
@@ -104,6 +112,15 @@ def instantiate_custom_component(node_type, class_object, params):
return class_object().build(**params)
+def instantiate_wrapper(node_type, class_object, params):
+ if node_type in wrapper_creator.from_method_nodes:
+ method = wrapper_creator.from_method_nodes[node_type]
+ if class_method := getattr(class_object, method, None):
+ return class_method(**params)
+ raise ValueError(f"Method {method} not found in {class_object}")
+ return class_object(**params)
+
+
def instantiate_output_parser(node_type, class_object, params):
if node_type in output_parser_creator.from_method_nodes:
method = output_parser_creator.from_method_nodes[node_type]
@@ -119,12 +136,21 @@ def instantiate_llm(node_type, class_object, params: Dict):
# False if condition is True
if node_type == "VertexAI":
return initialize_vertexai(class_object=class_object, params=params)
+ # max_tokens sometimes is a string and should be an int
+ if "max_tokens" in params:
+ if isinstance(params["max_tokens"], str) and params["max_tokens"].isdigit():
+ params["max_tokens"] = int(params["max_tokens"])
+ elif not isinstance(params.get("max_tokens"), int):
+ params.pop("max_tokens", None)
return class_object(**params)
def instantiate_memory(node_type, class_object, params):
# process input_key and output_key to remove them if
# they are empty strings
+ if node_type == "ConversationEntityMemory":
+ params.pop("memory_key", None)
+
for key in ["input_key", "output_key"]:
if key in params and (params[key] == "" or not params[key]):
params.pop(key)
@@ -178,8 +204,7 @@ def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params:
agent = class_method(**params)
tools = params.get("tools", [])
return AgentExecutor.from_agent_and_tools(
- agent=agent,
- tools=tools,
+ agent=agent, tools=tools, handle_parsing_errors=True
)
return load_agent_executor(class_object, params)
@@ -189,7 +214,7 @@ def instantiate_prompt(node_type, class_object, params: Dict):
if "tools" not in params:
params["tools"] = []
return ZeroShotAgent.create_prompt(**params)
- if "MessagePromptTemplate" in node_type:
+ elif "MessagePromptTemplate" in node_type:
# Then we only need the template
from_template_params = {
"template": params.pop("prompt", params.pop("template", ""))
@@ -197,12 +222,12 @@ def instantiate_prompt(node_type, class_object, params: Dict):
if not from_template_params.get("template"):
raise ValueError("Prompt template is required")
- return class_object.from_template(**from_template_params)
+ prompt = class_object.from_template(**from_template_params)
- if node_type == "ChatPromptTemplate":
- return class_object.from_messages(**params)
-
- prompt = class_object(**params)
+ elif node_type == "ChatPromptTemplate":
+ prompt = class_object.from_messages(**params)
+ else:
+ prompt = class_object(**params)
format_kwargs: Dict[str, Any] = {}
for input_variable in prompt.input_variables:
@@ -214,18 +239,23 @@ def instantiate_prompt(node_type, class_object, params: Dict):
variable, "get_format_instructions"
):
format_kwargs[input_variable] = variable.get_format_instructions()
- # check if is a list of Document
elif isinstance(variable, List) and all(
isinstance(item, Document) for item in variable
):
# Format document to contain page_content and metadata
# as one string separated by a newline
- format_kwargs[input_variable] = "\n".join(
- [
- f"Document:{item.page_content}\nMetadata:{item.metadata}"
- for item in variable
- ]
- )
+ if len(variable) > 1:
+ content = "\n".join(
+ [item.page_content for item in variable if item.page_content]
+ )
+ else:
+ content = variable[0].page_content
+ # content could be a json list of strings
+ with contextlib.suppress(json.JSONDecodeError):
+ content = json.loads(content)
+ if isinstance(content, list):
+ content = ",".join([str(item) for item in content])
+ format_kwargs[input_variable] = content
# handle_keys will be a list but it does not exist yet
# so we need to create it
@@ -247,12 +277,14 @@ def instantiate_prompt(node_type, class_object, params: Dict):
def instantiate_tool(node_type, class_object: Type[BaseTool], params: Dict):
if node_type == "JsonSpec":
- params["dict_"] = load_file_into_dict(params.pop("path"))
+ if file_dict := load_file_into_dict(params.pop("path")):
+ params["dict_"] = file_dict
+ else:
+ raise ValueError("Invalid file")
return class_object(**params)
elif node_type == "PythonFunctionTool":
params["func"] = get_function(params.get("code"))
return class_object(**params)
- # For backward compatibility
elif node_type == "PythonFunction":
function_string = params["code"]
if isinstance(function_string, str):
@@ -354,6 +386,12 @@ def instantiate_textsplitter(
"separator_type" in params and params["separator_type"] == "Text"
) or "separator_type" not in params:
params.pop("separator_type", None)
+            # separators might come in as an escaped string like \\n
+            # so we need to decode the escape sequences into real characters
+ if "separators" in params:
+ params["separators"] = (
+ params["separators"].encode().decode("unicode-escape")
+ )
text_splitter = class_object(**params)
else:
from langchain.text_splitter import Language
@@ -406,6 +444,7 @@ def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs)
return AgentExecutor.from_agent_and_tools(
agent=agent,
tools=allowed_tools,
+ handle_parsing_errors=True,
# memory=memory,
**kwargs,
)
diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py
index 1ab2b4ce5..9203915cf 100644
--- a/src/backend/langflow/interface/utils.py
+++ b/src/backend/langflow/interface/utils.py
@@ -16,17 +16,15 @@ def load_file_into_dict(file_path: str) -> dict:
if not os.path.exists(file_path):
raise FileNotFoundError(f"File not found: {file_path}")
- file_extension = os.path.splitext(file_path)[1].lower()
-
- if file_extension == ".json":
- with open(file_path, "r") as json_file:
- data = json.load(json_file)
- elif file_extension in [".yaml", ".yml"]:
- with open(file_path, "r") as yaml_file:
- data = yaml.safe_load(yaml_file)
- else:
- raise ValueError("Unsupported file type. Please provide a JSON or YAML file.")
-
+    # File names are UUIDs, so we can't infer the extension
+ with open(file_path, "r") as file:
+ try:
+ data = json.load(file)
+ except json.JSONDecodeError:
+ file.seek(0)
+ data = yaml.safe_load(file)
+ except ValueError as exc:
+ raise ValueError("Invalid file type. Expected .json or .yaml.") from exc
return data
diff --git a/src/backend/langflow/interface/wrappers/base.py b/src/backend/langflow/interface/wrappers/base.py
index f5773d07a..77e38f921 100644
--- a/src/backend/langflow/interface/wrappers/base.py
+++ b/src/backend/langflow/interface/wrappers/base.py
@@ -1,25 +1,36 @@
from typing import Dict, List, Optional
-from langchain import requests
+from langchain import requests, sql_database
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.logger import logger
-from langflow.utils.util import build_template_from_class
+from langflow.utils.util import build_template_from_class, build_template_from_method
class WrapperCreator(LangChainTypeCreator):
type_name: str = "wrappers"
+ from_method_nodes = {"SQLDatabase": "from_uri"}
+
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = {
- wrapper.__name__: wrapper for wrapper in [requests.TextRequestsWrapper]
+ wrapper.__name__: wrapper
+ for wrapper in [requests.TextRequestsWrapper, sql_database.SQLDatabase]
}
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
try:
+ if name in self.from_method_nodes:
+ return build_template_from_method(
+ name,
+ type_to_cls_dict=self.type_to_loader_dict,
+ add_function=True,
+ method_name=self.from_method_nodes[name],
+ )
+
return build_template_from_class(name, self.type_to_loader_dict)
except ValueError as exc:
raise ValueError("Wrapper not found") from exc
diff --git a/src/backend/langflow/processing/base.py b/src/backend/langflow/processing/base.py
index 478b98816..f8690bbdf 100644
--- a/src/backend/langflow/processing/base.py
+++ b/src/backend/langflow/processing/base.py
@@ -19,8 +19,11 @@ async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwa
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = True
+ try:
+ fix_memory_inputs(langchain_object)
+ except Exception as exc:
+ logger.error(exc)
- fix_memory_inputs(langchain_object)
try:
async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)]
output = await langchain_object.acall(inputs, callbacks=async_callbacks)
@@ -39,7 +42,11 @@ async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwa
if isinstance(output, dict)
else output
)
- thought = format_actions(intermediate_steps) if intermediate_steps else ""
+ try:
+ thought = format_actions(intermediate_steps) if intermediate_steps else ""
+ except Exception as exc:
+ logger.exception(exc)
+ thought = ""
except Exception as exc:
logger.exception(exc)
raise ValueError(f"Error: {str(exc)}") from exc
diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py
index 1b219fe11..03e6e4c35 100644
--- a/src/backend/langflow/processing/process.py
+++ b/src/backend/langflow/processing/process.py
@@ -22,7 +22,10 @@ def fix_memory_inputs(langchain_object):
if not hasattr(langchain_object, "memory") or langchain_object.memory is None:
return
try:
- if langchain_object.memory.memory_key in langchain_object.input_variables:
+ if (
+ hasattr(langchain_object.memory, "memory_key")
+ and langchain_object.memory.memory_key in langchain_object.input_variables
+ ):
return
except AttributeError:
input_variables = (
@@ -92,6 +95,10 @@ def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = No
logger.debug("Loaded LangChain object")
if inputs is None:
inputs = {}
+
+ # Add artifacts to inputs
+ # artifacts can be documents loaded when building
+ # the flow
for (
key,
value,
@@ -113,8 +120,7 @@ def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = No
result = get_result_and_thought(langchain_object, inputs)
logger.debug("Generated result and thought")
elif isinstance(langchain_object, VectorStore):
- class_name = langchain_object.__class__.__name__
- result = {"message": f"Processed {class_name} successfully"}
+ result = langchain_object.search(**inputs)
else:
raise ValueError(
f"Unknown langchain_object type: {type(langchain_object).__name__}"
@@ -123,23 +129,23 @@ def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = No
def load_flow_from_json(
- input: Union[Path, str, dict], tweaks: Optional[dict] = None, build=True
+ flow: Union[Path, str, dict], tweaks: Optional[dict] = None, build=True
):
"""
Load flow from a JSON file or a JSON object.
- :param input: JSON file path or JSON object
+ :param flow: JSON file path or JSON object
:param tweaks: Optional tweaks to be processed
:param build: If True, build the graph, otherwise return the graph object
:return: Langchain object or Graph object depending on the build parameter
"""
# If input is a file path, load JSON from the file
- if isinstance(input, (str, Path)):
- with open(input, "r", encoding="utf-8") as f:
+ if isinstance(flow, (str, Path)):
+ with open(flow, "r", encoding="utf-8") as f:
flow_graph = json.load(f)
# If input is a dictionary, assume it's a JSON object
- elif isinstance(input, dict):
- flow_graph = input
+ elif isinstance(flow, dict):
+ flow_graph = flow
else:
raise TypeError(
"Input must be either a file path (str) or a JSON object (dict)"
diff --git a/src/backend/langflow/template/frontend_node/chains.py b/src/backend/langflow/template/frontend_node/chains.py
index ce8c1c62c..b678dec3b 100644
--- a/src/backend/langflow/template/frontend_node/chains.py
+++ b/src/backend/langflow/template/frontend_node/chains.py
@@ -81,7 +81,7 @@ class ChainFrontendNode(FrontendNode):
field.advanced = False
if field.name == "verbose":
field.required = False
- field.show = True
+ field.show = False
field.advanced = True
if field.name == "llm":
field.required = True
diff --git a/src/backend/langflow/template/frontend_node/constants.py b/src/backend/langflow/template/frontend_node/constants.py
index 90cdbf280..295995586 100644
--- a/src/backend/langflow/template/frontend_node/constants.py
+++ b/src/backend/langflow/template/frontend_node/constants.py
@@ -58,3 +58,7 @@ The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.
You can change this to use other APIs like JinaChat, LocalAI and Prem.
"""
+
+
+INPUT_KEY_INFO = """The variable to be used as Chat Input when more than one variable is available."""
+OUTPUT_KEY_INFO = """The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)"""
diff --git a/src/backend/langflow/template/frontend_node/memories.py b/src/backend/langflow/template/frontend_node/memories.py
index d98a322ff..374d36ff0 100644
--- a/src/backend/langflow/template/frontend_node/memories.py
+++ b/src/backend/langflow/template/frontend_node/memories.py
@@ -2,6 +2,7 @@ from typing import Optional
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
+from langflow.template.frontend_node.constants import INPUT_KEY_INFO, OUTPUT_KEY_INFO
from langflow.template.template.base import Template
from langchain.memory.chat_message_histories.postgres import DEFAULT_CONNECTION_STRING
from langchain.memory.chat_message_histories.mongodb import (
@@ -70,11 +71,15 @@ class MemoryFrontendNode(FrontendNode):
field.required = False
field.show = True
field.advanced = False
- if field.name in ["input_key", "output_key"]:
+ if field.name in {"input_key", "output_key"}:
field.required = False
field.show = True
field.advanced = False
field.value = ""
+ field.info = (
+ INPUT_KEY_INFO if field.name == "input_key" else OUTPUT_KEY_INFO
+ )
+
if field.name == "memory_key":
field.value = "chat_history"
if field.name == "chat_memory":
@@ -84,9 +89,10 @@ class MemoryFrontendNode(FrontendNode):
if field.name == "url":
field.show = True
if field.name == "entity_store":
- field.show = True
- if name == "SQLiteEntityStore":
- field.show = True
+ field.show = False
+ if name == "ConversationEntityMemory" and field.name == "memory_key":
+ field.show = False
+ field.required = False
class PostgresChatMessageHistoryFrontendNode(MemoryFrontendNode):
diff --git a/src/backend/langflow/template/frontend_node/textsplitters.py b/src/backend/langflow/template/frontend_node/textsplitters.py
index 04700f1c4..1d5549042 100644
--- a/src/backend/langflow/template/frontend_node/textsplitters.py
+++ b/src/backend/langflow/template/frontend_node/textsplitters.py
@@ -45,7 +45,7 @@ class TextSplittersFrontendNode(FrontendNode):
field_type="str",
required=True,
show=True,
- value=".",
+ value="\\n",
name=name,
display_name="Separator",
)
diff --git a/src/backend/langflow/template/frontend_node/tools.py b/src/backend/langflow/template/frontend_node/tools.py
index 16e6955aa..d23033b35 100644
--- a/src/backend/langflow/template/frontend_node/tools.py
+++ b/src/backend/langflow/template/frontend_node/tools.py
@@ -112,7 +112,7 @@ class PythonFunctionToolNode(FrontendNode):
],
)
description: str = "Python function to be executed."
- base_classes: list[str] = ["Tool"]
+ base_classes: list[str] = ["BaseTool", "Tool"]
def to_dict(self):
return super().to_dict()
diff --git a/src/frontend/index.html b/src/frontend/index.html
index 426983565..8c21f0124 100644
--- a/src/frontend/index.html
+++ b/src/frontend/index.html
@@ -5,7 +5,7 @@
-
+
Langflow
diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
index 06b7ebaa8..4cd0c8518 100644
--- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
+++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
@@ -12,10 +12,12 @@ import IntComponent from "../../../../components/intComponent";
import PromptAreaComponent from "../../../../components/promptComponent";
import TextAreaComponent from "../../../../components/textAreaComponent";
import ToggleShadComponent from "../../../../components/toggleShadComponent";
+import { MAX_LENGTH_TO_SCROLL_TOOLTIP } from "../../../../constants";
import { PopUpContext } from "../../../../contexts/popUpContext";
import { TabsContext } from "../../../../contexts/tabsContext";
import { typesContext } from "../../../../contexts/typesContext";
import { ParameterComponentType } from "../../../../types/components";
+import { cleanEdges } from "../../../../util/reactflowUtils";
import {
classNames,
getRandomKeyByssmm,
@@ -41,6 +43,7 @@ export default function ParameterComponent({
}: ParameterComponentType) {
const ref = useRef(null);
const refHtml = useRef(null);
+ const refNumberComponents = useRef(0);
const infoHtml = useRef(null);
const updateNodeInternals = useUpdateNodeInternals();
const [position, setPosition] = useState(0);
@@ -58,10 +61,6 @@ export default function ParameterComponent({
updateNodeInternals(data.id);
}, [data.id, position, updateNodeInternals]);
- const [enabled, setEnabled] = useState(
- data.node.template[name]?.value ?? false
- );
-
useEffect(() => {}, [closePopUp, data.node.template]);
const { reactFlowInstance } = useContext(typesContext);
@@ -99,6 +98,8 @@ export default function ParameterComponent({
useEffect(() => {
const groupedObj = groupByFamily(myData, tooltipTitle, left, data.type);
+ refNumberComponents.current = groupedObj[0]?.type?.length;
+
refHtml.current = groupedObj.map((item, i) => {
const Icon: any = nodeIconsLucide[item.family];
@@ -127,7 +128,7 @@ export default function ParameterComponent({
{nodeNames[item.family] ?? ""}{" "}
{" "}
- {item.type == "" ? "" : " - "}
+ {item.type === "" ? "" : " - "}
{item.type.split(", ").length > 2
? item.type.split(", ").map((el, i) => (
@@ -160,7 +161,7 @@ export default function ParameterComponent({
}
>
{title}
- {required ? " *" : ""}
+ {required ? " *" : ""}
{info !== "" && (
@@ -181,7 +182,11 @@ export default function ParameterComponent({
<>>
) : (
MAX_LENGTH_TO_SCROLL_TOOLTIP
+ ? "tooltip-fixed-width custom-scroll overflow-y-scroll nowheel"
+ : "tooltip-fixed-width"
+ }
delayDuration={0}
content={refHtml.current}
side={left ? "left" : "right"}
@@ -222,7 +227,7 @@ export default function ParameterComponent({
/>
) : data.node.template[name].multiline ? (
@@ -240,10 +245,9 @@ export default function ParameterComponent({
{
handleOnNewValue(t);
- setEnabled(t);
}}
size="large"
/>
@@ -309,6 +313,15 @@ export default function ParameterComponent({
field_name={name}
setNodeClass={(nodeClass) => {
data.node = nodeClass;
+ if (reactFlowInstance) {
+ cleanEdges({
+ flow: {
+ edges: reactFlowInstance.getEdges(),
+ nodes: reactFlowInstance.getNodes(),
+ },
+ updateEdge: (edge) => reactFlowInstance.setEdges(edge),
+ });
+ }
}}
nodeClass={data.node}
disabled={disabled}
diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx
index 5a727106b..ee62461bd 100644
--- a/src/frontend/src/CustomNodes/GenericNode/index.tsx
+++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx
@@ -114,7 +114,9 @@ export default function GenericNode({
Building...
+ ) : !validationStatus ? (
Build{" "}
Notifications
diff --git a/src/frontend/src/alerts/error/index.tsx b/src/frontend/src/alerts/error/index.tsx
index ed395018e..3824c3e87 100644
--- a/src/frontend/src/alerts/error/index.tsx
+++ b/src/frontend/src/alerts/error/index.tsx
@@ -20,6 +20,7 @@ export default function ErrorAlert({
}, 5000);
}
}, [id, removeAlert, show]);
+
return (
{title}
- {list.length !== 0 ? (
+ {list?.length !== 0 &&
+ list?.some((item) => item !== null && item !== undefined) ? (
{list.map((item, index) => (
diff --git a/src/frontend/src/components/AccordionComponent/index.tsx b/src/frontend/src/components/AccordionComponent/index.tsx
index ceeb938a8..3b0ddd596 100644
--- a/src/frontend/src/components/AccordionComponent/index.tsx
+++ b/src/frontend/src/components/AccordionComponent/index.tsx
@@ -13,7 +13,7 @@ export default function AccordionComponent({
open = [],
}: AccordionComponentType) {
const [value, setValue] = useState(
- open.length == 0 ? "" : getOpenAccordion()
+ open.length === 0 ? "" : getOpenAccordion()
);
function getOpenAccordion() {
diff --git a/src/frontend/src/components/SanitizedHTMLWrapper/index.tsx b/src/frontend/src/components/SanitizedHTMLWrapper/index.tsx
new file mode 100644
index 000000000..e76105c00
--- /dev/null
+++ b/src/frontend/src/components/SanitizedHTMLWrapper/index.tsx
@@ -0,0 +1,21 @@
+import DOMPurify from "dompurify";
+
+const SanitizedHTMLWrapper = ({
+ className,
+ content,
+ onClick,
+ suppressWarning = false,
+}) => {
+ const sanitizedHTML = DOMPurify.sanitize(content);
+
+ return (
+
+ );
+};
+
+export default SanitizedHTMLWrapper;
diff --git a/src/frontend/src/components/ShadTooltipComponent/index.tsx b/src/frontend/src/components/ShadTooltipComponent/index.tsx
index 3fe82afd7..89f301e60 100644
--- a/src/frontend/src/components/ShadTooltipComponent/index.tsx
+++ b/src/frontend/src/components/ShadTooltipComponent/index.tsx
@@ -1,33 +1,26 @@
import { ShadToolTipType } from "../../types/components";
-import {
- Tooltip,
- TooltipContent,
- TooltipProvider,
- TooltipTrigger,
-} from "../ui/tooltip";
+import { Tooltip, TooltipContent, TooltipTrigger } from "../ui/tooltip";
export default function ShadTooltip({
content,
side,
asChild = true,
children,
- style,
+ styleClasses,
delayDuration = 500,
}: ShadToolTipType) {
return (
-
-
- {children}
+
+ {children}
-
- {content}
-
-
-
+
+ {content}
+
+
);
}
diff --git a/src/frontend/src/components/chatComponent/index.tsx b/src/frontend/src/components/chatComponent/index.tsx
index 829dd5b13..58c058866 100644
--- a/src/frontend/src/components/chatComponent/index.tsx
+++ b/src/frontend/src/components/chatComponent/index.tsx
@@ -72,7 +72,7 @@ export default function Chat({ flow }: ChatType) {
}
prevNodesRef.current = currentNodes;
- }, [tabsState]);
+ }, [tabsState, flow.id]);
return (
<>
@@ -83,9 +83,17 @@ export default function Chat({ flow }: ChatType) {
setIsBuilt={setIsBuilt}
isBuilt={isBuilt}
/>
- {isBuilt && canOpen && (
-
- )}
+ {isBuilt &&
+ tabsState[flow.id] &&
+ tabsState[flow.id].formKeysData &&
+ canOpen && (
+
+ )}
diff --git a/src/frontend/src/components/headerComponent/components/menuBar/index.tsx b/src/frontend/src/components/headerComponent/components/menuBar/index.tsx
index a1536f13d..b362307e7 100644
--- a/src/frontend/src/components/headerComponent/components/menuBar/index.tsx
+++ b/src/frontend/src/components/headerComponent/components/menuBar/index.tsx
@@ -14,7 +14,6 @@ import {
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuLabel,
- DropdownMenuSeparator,
DropdownMenuTrigger,
} from "../../../ui/dropdown-menu";
@@ -52,13 +51,11 @@ export const MenuBar = ({ flows, tabId }) => {
-
- {current_flow.name}
-
+
+
+
{current_flow.name}
+
+
@@ -99,7 +96,7 @@ export const MenuBar = ({ flows, tabId }) => {
Redo
-
+ {/* */}
{/* Projects */}
{/*
diff --git a/src/frontend/src/components/inputComponent/index.tsx b/src/frontend/src/components/inputComponent/index.tsx
index 4d6045caf..8da5c428e 100644
--- a/src/frontend/src/components/inputComponent/index.tsx
+++ b/src/frontend/src/components/inputComponent/index.tsx
@@ -39,11 +39,13 @@ export default function InputComponent({
if (disableCopyPaste) setDisableCopyPaste(false);
}}
className={classNames(
- " pr-9 ",
disabled ? " input-disable " : "",
- password && !pwdVisible && myValue !== "" ? "password" : "",
+ password && !pwdVisible && myValue !== ""
+ ? " text-clip password "
+ : "",
editNode ? " input-edit-node " : " input-primary ",
- password && editNode ? "pr-8" : "pr-3"
+ password && editNode ? "pr-8" : "",
+ password && !editNode ? "pr-10" : ""
)}
placeholder={password && editNode ? "Key" : "Type something..."}
onChange={(e) => {
diff --git a/src/frontend/src/components/inputFileComponent/index.tsx b/src/frontend/src/components/inputFileComponent/index.tsx
index fc69e5f95..7316566ae 100644
--- a/src/frontend/src/components/inputFileComponent/index.tsx
+++ b/src/frontend/src/components/inputFileComponent/index.tsx
@@ -98,10 +98,10 @@ export default function InputFileComponent({
onClick={handleButtonClick}
className={
editNode
- ? " input-edit-node " + " input-dialog "
- : (disabled ? " input-disable " : "") +
- " input-primary " +
- " input-dialog "
+ ? "input-edit-node input-dialog text-muted-foreground"
+ : disabled
+ ? "input-disable input-dialog input-primary"
+ : "input-dialog input-primary text-muted-foreground"
}
>
{myValue !== "" ? myValue : "No file"}
diff --git a/src/frontend/src/components/intComponent/index.tsx b/src/frontend/src/components/intComponent/index.tsx
index 2428d03ee..09c342712 100644
--- a/src/frontend/src/components/intComponent/index.tsx
+++ b/src/frontend/src/components/intComponent/index.tsx
@@ -30,7 +30,7 @@ export default function IntComponent({
{
diff --git a/src/frontend/src/components/promptComponent/index.tsx b/src/frontend/src/components/promptComponent/index.tsx
index 7385518c8..22384c299 100644
--- a/src/frontend/src/components/promptComponent/index.tsx
+++ b/src/frontend/src/components/promptComponent/index.tsx
@@ -4,7 +4,6 @@ import GenericModal from "../../modals/genericModal";
import { TextAreaComponentType } from "../../types/components";
import { TypeModal } from "../../utils";
-import * as _ from "lodash";
import { ExternalLink } from "lucide-react";
import { typesContext } from "../../contexts/typesContext";
import { postValidatePrompt } from "../../controllers/API";
@@ -18,7 +17,7 @@ export default function PromptAreaComponent({
disabled,
editNode = false,
}: TextAreaComponentType) {
- const [myValue, setMyValue] = useState("");
+ const [myValue, setMyValue] = useState(value);
const { openPopUp } = useContext(PopUpContext);
const { reactFlowInstance } = useContext(typesContext);
useEffect(() => {
@@ -29,84 +28,93 @@ export default function PromptAreaComponent({
}, [disabled, onChange]);
useEffect(() => {
- if (value !== "" && myValue !== value && reactFlowInstance) {
- // only executed once
- setMyValue(value);
- postValidatePrompt(field_name, value, nodeClass)
- .then((apiReturn) => {
- if (apiReturn.data) {
- setNodeClass(apiReturn.data.frontend_node);
- // need to update reactFlowInstance to re-render the nodes.
- reactFlowInstance.setEdges(
- _.cloneDeep(reactFlowInstance.getEdges())
- );
- }
- })
- .catch((error) => {});
+ setMyValue(value);
+ if (value !== "" && !editNode) {
+ postValidatePrompt(field_name, value, nodeClass).then((apiReturn) => {
+ if (apiReturn.data) {
+ setNodeClass(apiReturn.data.frontend_node);
+ // need to update reactFlowInstance to re-render the nodes.
+ }
+ });
}
- }, [reactFlowInstance, field_name]);
+ }, [value, reactFlowInstance]);
+
+ // useEffect(() => {
+ // if (value !== "" && myValue !== value && reactFlowInstance) {
+ // // only executed once
+ // setMyValue(value);
+ // postValidatePrompt(field_name, value, nodeClass)
+ // .then((apiReturn) => {
+ // if (apiReturn.data) {
+ // setNodeClass(apiReturn.data.frontend_node);
+ // // need to update reactFlowInstance to re-render the nodes.
+ // reactFlowInstance.setEdges(
+ // _.cloneDeep(reactFlowInstance.getEdges())
+ // );
+ // }
+ // })
+ // .catch((error) => {});
+ // }
+ // }, [reactFlowInstance, field_name, myValue, nodeClass, setNodeClass, value]);
return (
-
-
-
-
{
- openPopUp(
- {
- setMyValue(t);
- onChange(t);
- }}
- nodeClass={nodeClass}
- setNodeClass={setNodeClass}
- />
- );
- }}
- className={
- editNode
- ? " input-edit-node " + " input-dialog "
- : (disabled ? " input-disable " : "") +
- " input-primary " +
- " input-dialog "
- }
- >
- {myValue !== "" ? myValue : "Type your prompt here"}
-
-
{
- openPopUp(
- {
- setMyValue(t);
- onChange(t);
- }}
- nodeClass={nodeClass}
- setNodeClass={setNodeClass}
- />
- );
- }}
- >
- {!editNode && (
-
+
+ {
+ openPopUp(
+ {
+ setMyValue(t);
+ onChange(t);
+ }}
+ nodeClass={nodeClass}
+ setNodeClass={setNodeClass}
/>
- )}
-
-
+ );
+ }}
+ className={
+ editNode
+ ? "input-edit-node input-dialog"
+ : (disabled ? " input-disable text-ring " : "") +
+ " input-primary text-muted-foreground "
+ }
+ >
+ {myValue !== "" ? myValue : "Type your prompt here"}
+
+ {
+ openPopUp(
+ {
+ setMyValue(t);
+ onChange(t);
+ }}
+ nodeClass={nodeClass}
+ setNodeClass={setNodeClass}
+ />
+ );
+ }}
+ >
+ {!editNode && (
+
+ )}
+
);
diff --git a/src/frontend/src/components/textAreaComponent/index.tsx b/src/frontend/src/components/textAreaComponent/index.tsx
index a50969cfb..8083b4eb7 100644
--- a/src/frontend/src/components/textAreaComponent/index.tsx
+++ b/src/frontend/src/components/textAreaComponent/index.tsx
@@ -5,6 +5,7 @@ import { TextAreaComponentType } from "../../types/components";
import { TypeModal } from "../../utils";
import { ExternalLink } from "lucide-react";
+import { TabsContext } from "../../contexts/tabsContext";
export default function TextAreaComponent({
value,
@@ -14,6 +15,7 @@ export default function TextAreaComponent({
}: TextAreaComponentType) {
const [myValue, setMyValue] = useState(value);
const { openPopUp, closePopUp } = useContext(PopUpContext);
+ const { setDisableCopyPaste } = useContext(TabsContext);
useEffect(() => {
if (disabled) {
@@ -27,19 +29,20 @@ export default function TextAreaComponent({
}, [closePopUp]);
return (
-
-
+
+
{
+ setDisableCopyPaste(true);
+ }}
+ onBlur={() => {
+ setDisableCopyPaste(false);
+ }}
className={
editNode
- ? "input-edit-node"
- : "input-primary" + (disabled ? " input-disable " : "")
+ ? " input-edit-node "
+ : " input-primary " + (disabled ? " input-disable" : "")
}
placeholder={"Type something..."}
onChange={(e) => {
diff --git a/src/frontend/src/components/ui/separator.tsx b/src/frontend/src/components/ui/separator.tsx
index a770af987..1fa0287d8 100644
--- a/src/frontend/src/components/ui/separator.tsx
+++ b/src/frontend/src/components/ui/separator.tsx
@@ -17,7 +17,7 @@ const Separator = React.forwardRef<
decorative={decorative}
orientation={orientation}
className={cn(
- "shrink-0 bg-border",
+ "shrink-0 bg-ring/40",
orientation === "horizontal" ? "h-[1px] w-full" : "h-full w-[1px]",
className
)}
diff --git a/src/frontend/src/components/ui/tooltip.tsx b/src/frontend/src/components/ui/tooltip.tsx
index dc847052e..6bb983652 100644
--- a/src/frontend/src/components/ui/tooltip.tsx
+++ b/src/frontend/src/components/ui/tooltip.tsx
@@ -14,15 +14,17 @@ const TooltipContent = React.forwardRef<
React.ElementRef
,
React.ComponentPropsWithoutRef
>(({ className, sideOffset = 4, ...props }, ref) => (
-
+
+
+
));
TooltipContent.displayName = TooltipPrimitive.Content.displayName;
diff --git a/src/frontend/src/constants.tsx b/src/frontend/src/constants.tsx
index bb54427f9..108b6780b 100644
--- a/src/frontend/src/constants.tsx
+++ b/src/frontend/src/constants.tsx
@@ -5,6 +5,12 @@ import { FlowType } from "./types/flow";
import { TabsState } from "./types/tabs";
import { buildInputs, buildTweaks } from "./utils";
+/**
+ * Number maximum of components to scroll on tooltips
+ * @constant
+ */
+export const MAX_LENGTH_TO_SCROLL_TOOLTIP = 200;
+
/**
* The base text for subtitle of Export Dialog (Toolbar)
* @constant
@@ -545,10 +551,3 @@ export const NOUNS: string[] = [
*
*/
export const USER_PROJECTS_HEADER = "My Collection";
-/**
- * CSS for highlight HTML
- * @constant
- *
- */
-export const HIGHLIGH_CSS =
- "block pl-3 pr-14 py-2 w-full h-full text-sm outline-0 border-0 break-all overflow-y-hidden max-w-[75vw]";
diff --git a/src/frontend/src/contexts/index.tsx b/src/frontend/src/contexts/index.tsx
index 13771673c..70b7da07c 100644
--- a/src/frontend/src/contexts/index.tsx
+++ b/src/frontend/src/contexts/index.tsx
@@ -1,5 +1,6 @@
import { ReactNode } from "react";
import { ReactFlowProvider } from "reactflow";
+import { TooltipProvider } from "../components/ui/tooltip";
import { SSEProvider } from "./SSEContext";
import { AlertProvider } from "./alertContext";
import { DarkProvider } from "./darkContext";
@@ -13,23 +14,25 @@ export default function ContextWrapper({ children }: { children: ReactNode }) {
//element to wrap all context
return (
<>
-
-
-
-
-
-
-
-
- {children}
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+ {children}
+
+
+
+
+
+
+
+
+
>
);
}
diff --git a/src/frontend/src/contexts/tabsContext.tsx b/src/frontend/src/contexts/tabsContext.tsx
index 001152c9c..a7ddf11f8 100644
--- a/src/frontend/src/contexts/tabsContext.tsx
+++ b/src/frontend/src/contexts/tabsContext.tsx
@@ -54,7 +54,7 @@ const TabsContextInitialValue: TabsContextType = {
setTabsState: (state: TabsState) => {},
getNodeId: (nodeType: string) => "",
setTweak: (tweak: any) => {},
- getTweak: {},
+ getTweak: [],
paste: (
selection: { nodes: any; edges: any },
position: { x: number; y: number; paneX?: number; paneY?: number }
@@ -75,7 +75,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
const { templates, reactFlowInstance } = useContext(typesContext);
const [lastCopiedSelection, setLastCopiedSelection] = useState(null);
const [tabsState, setTabsState] = useState({});
- const [getTweak, setTweak] = useState({});
+ const [getTweak, setTweak] = useState([]);
const newNodeId = useRef(uid());
function incrementNodeId() {
@@ -539,11 +539,10 @@ export function TabsProvider({ children }: { children: ReactNode }) {
const updateEdges = (edges) => {
edges.forEach((edge) => {
- edge.style = { stroke: "inherit" };
edge.className =
- edge.targetHandle.split("|")[0] === "Text"
+ (edge.targetHandle.split("|")[0] === "Text"
? "stroke-gray-800 "
- : "stroke-gray-900 ";
+ : "stroke-gray-900 ") + " stroke-connection";
edge.animated = edge.targetHandle.split("|")[0] === "Text";
});
};
diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts
index fdfe721a9..0c87f6a52 100644
--- a/src/frontend/src/controllers/API/index.ts
+++ b/src/frontend/src/controllers/API/index.ts
@@ -76,7 +76,7 @@ export async function postValidatePrompt(
*/
export async function getExamples(): Promise {
const url =
- "https://api.github.com/repos/logspace-ai/langflow_examples/contents/examples";
+ "https://api.github.com/repos/logspace-ai/langflow_examples/contents/examples?ref=fix_examples";
const response = await axios.get(url);
const jsonFiles = response.data.filter((file: any) => {
diff --git a/src/frontend/src/icons/Google/google.svg b/src/frontend/src/icons/Google/google.svg
index b518c5270..c599462cb 100644
--- a/src/frontend/src/icons/Google/google.svg
+++ b/src/frontend/src/icons/Google/google.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/src/frontend/src/icons/HuggingFace/index.tsx b/src/frontend/src/icons/HuggingFace/index.tsx
index 44fc68609..36599e0f6 100644
--- a/src/frontend/src/icons/HuggingFace/index.tsx
+++ b/src/frontend/src/icons/HuggingFace/index.tsx
@@ -1,7 +1,7 @@
import React, { forwardRef } from "react";
import { ReactComponent as HugginFaceSVG } from "./hf-logo.svg";
-export const HugginFaceIcon = forwardRef<
+export const HuggingFaceIcon = forwardRef<
SVGSVGElement,
React.PropsWithChildren<{}>
>((props, ref) => {
diff --git a/src/frontend/src/index.css b/src/frontend/src/index.css
index 82c7f027c..c4950ec4e 100644
--- a/src/frontend/src/index.css
+++ b/src/frontend/src/index.css
@@ -6,8 +6,8 @@
/* TODO: Confirm that all colors here are found in tailwind config */
@layer base {
-
-:root {
+
+ :root {
--background: 0 0% 100%; /* hsl(0 0% 100%) */
--foreground: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
--muted: 210 40% 98%; /* hsl(210 40% 98%) */
@@ -27,36 +27,40 @@
--destructive: 0 100% 50%; /* hsl(0 100% 50%) */
--destructive-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
--radius: 0.5rem;
- --ring: 215 20.2% 65.1%; /* hsl(215 20% 65%) */
-
+ --ring: 215 20.2% 65.1%; /* hsl(215 20% 65%) */
--round-btn-shadow: #00000063;
-
+
--error-background: #fef2f2;
--error-foreground: #991b1b;
-
+
--success-background: #f0fdf4;
--success-foreground: #14532d;
--info-background: #f0f4fd;
--info-foreground: #141653;
-
+
--high-indigo: #4338ca;
--medium-indigo: #6366f1;
+ --chat-bot-icon: #afe6ef;
+ --chat-user-icon: #aface9;
+
/* Colors that are shared in dark and light mode */
--blur-shared: #151923de;
--build-trigger: #dc735b;
--chat-trigger: #5c8be1;
- --chat-trigger-disabled: #9db7e8;
+ --chat-trigger-disabled: #b4c3da;
--status-red: #ef4444;
--status-yellow: #eab308;
--status-green: #4ade80;
--status-blue:#2563eb;
+ --connection: #555;
+
}
.dark {
--background: 224 35% 7.5%; /* hsl(224 40% 10%) */
- --foreground: 213 31% 85%; /* hsl(213 31% 91%) */
+ --foreground: 213 31% 80%; /* hsl(213 31% 91%) */
--muted: 223 27% 11%; /* hsl(223 27% 11%) */
--muted-foreground: 215.4 16.3% 56.9%; /* hsl(215 16% 56%) */
@@ -70,13 +74,13 @@
--border: 216 24% 17%; /* hsl(216 34% 17%) */
--input: 216 24% 17%; /* hsl(216 34% 17%) */
- --primary: 210 20% 80%; /* hsl(210 40% 98%) */
+ --primary: 210 20% 80%; /* hsl(210 20% 80%) */
--primary-foreground: 222.2 27.4% 1.2%; /* hsl(222 47% 1%) */
- --secondary: 222.2 47.4% 11.2%; /* hsl(222 47% 11%) */
- --secondary-foreground: 210 40% 98%; /* hsl(210 40% 98%) */
+ --secondary: 222.2 37.4% 7.2%; /* hsl(222 47% 11%) */
+ --secondary-foreground: 210 40% 80%; /* hsl(210 40% 80%) */
- --accent: 216 24% 17%; /* hsl(216 34% 17%) */
+ --accent: 216 24% 20%; /* hsl(216 34% 17%) */
--accent-foreground: 210 30% 98%; /* hsl(210 40% 98%) */
--destructive: 0 63% 31%; /* hsl(0 63% 31%) */
@@ -87,7 +91,7 @@
--radius: 0.5rem;
--round-btn-shadow: #00000063;
-
+
--success-background: #022c22;
--success-foreground: #ecfdf5;
@@ -105,11 +109,13 @@
--blur-shared: #151923d2;
--build-trigger: #dc735b;
--chat-trigger: #5c8be1;
- --chat-trigger-disabled: #2b4470;
+ --chat-trigger-disabled: #2d3b54;
--status-red: #ef4444;
--status-yellow: #eab308;
--status-green: #4ade80;
--status-blue: #2563eb;
+ --connection: #555;
+
}}
@layer base {
@@ -205,6 +211,7 @@ The cursor: default; property value restores the browser's default cursor style
.input-primary {
@apply bg-background block border-ring form-input px-3 placeholder:text-muted-foreground rounded-md shadow-sm sm:text-sm truncate w-full;
}
+
.input-edit-node{
@apply input-primary placeholder:text-center pt-0.5 pb-0.5 text-center
}
@@ -212,7 +219,7 @@ The cursor: default; property value restores the browser's default cursor style
@apply input-primary pr-7 mx-2
}
.input-disable{
- @apply bg-input
+ @apply bg-border placeholder:text-ring border-transparent
}
.input-dialog{
@apply text-ring cursor-pointer bg-transparent
@@ -545,7 +552,7 @@ The cursor: default; property value restores the browser's default cursor style
@apply flex items-center gap-0.5 rounded-md px-1.5 py-1 text-sm font-medium
}
.header-menu-bar-display {
- @apply flex max-w-[200px] items-center gap-2
+ @apply flex max-w-[200px] items-center gap-2 cursor-pointer
}
.header-menu-flow-name {
@apply flex-1 truncate
@@ -913,5 +920,172 @@ The cursor: default; property value restores the browser's default cursor style
@apply ml-3 h-6 w-6
}
+ .form-modal-lock-true {
+ @apply bg-input text-primary
+ }
+ .form-modal-no-input {
+ @apply bg-input text-center text-primary dark:bg-gray-700 dark:text-gray-300
+ }
+ .form-modal-lock-false {
+ @apply bg-white text-primary
+ }
+ .code-highlight{
+ @apply block px-3 py-2 w-full h-full text-sm outline-0 border-0 break-all overflow-y-hidden
+ }
+ .form-modal-lockchat {
+ @apply form-input focus:ring-ring focus:border-ring block w-full rounded-md border-border p-4 pr-16 custom-scroll sm:text-sm
+ }
+ .form-modal-send-icon-position {
+ @apply absolute bottom-2 right-4
+ }
+ .form-modal-send-button {
+ @apply rounded-md p-2 px-1 transition-all duration-300
+ }
+ .form-modal-lock-icon {
+ @apply ml-1 mr-1 h-5 w-5 animate-pulse
+ }
+ .form-modal-send-icon {
+ @apply mr-2 h-5 w-5 rotate-[44deg]
+ }
+ .form-modal-play-icon {
+ @apply h-5 w-5 mx-1
+ }
+ .form-modal-chat-position {
+ @apply flex-max-width px-2 py-6 pl-4 pr-9
+ }
+ .form-modal-chatbot-icon {
+ @apply mb-3 ml-3 mr-6 mt-1
+ }
+ .form-modal-chat-image {
+ @apply flex flex-col items-center gap-1
+ }
+ .form-modal-chat-img-box {
+ @apply relative flex h-8 w-8 items-center justify-center overflow-hidden rounded-md p-5 text-2xl
+ }
+ .form-modal-chat-bot-icon {
+ @apply form-modal-chat-img-box bg-chat-bot-icon
+ }
+ .form-modal-chat-user-icon {
+ @apply form-modal-chat-img-box bg-chat-user-icon
+ }
+ .form-modal-chat-icon-img {
+ @apply absolute scale-[60%]
+ }
+ .form-modal-chat-text-position {
+ @apply flex w-full flex-1 text-start
+ }
+ .form-modal-chat-text {
+ @apply relative flex w-full flex-col text-start text-sm font-normal text-muted-foreground
+ }
+ .form-modal-chat-icon-div {
+ @apply absolute -left-6 -top-3 cursor-pointer
+ }
+ .form-modal-chat-icon {
+ @apply h-4 w-4 animate-bounce
+ }
+ .form-modal-chat-thought-border {
+ @apply rounded-md border border-ring/60
+ }
+ .form-modal-chat-thought-size {
+ @apply inline-block h-full w-[95%]
+ }
+ .form-modal-chat-thought {
+ @apply cursor-pointer overflow-scroll bg-background text-start text-primary scrollbar-hide form-modal-chat-thought-border form-modal-chat-thought-size py-2 px-2
+ }
+ .form-modal-markdown-span {
+ @apply mt-1 animate-pulse cursor-default
+ }
+ .form-modal-initial-prompt-btn {
+ @apply mb-2 flex items-center gap-2 rounded-md border border-border bg-background shadow-sm px-4 py-2 text-sm font-semibold
+ }
+ .form-modal-iv-box {
+ @apply mt-2 flex-max-width h-[80vh]
+ }
+ .form-modal-iv-size {
+ @apply mr-6 flex h-full w-2/6 flex-col justify-start overflow-auto scrollbar-hide
+ }
+ .file-component-arrangement {
+ @apply flex items-center py-2
+ }
+ .file-component-variable {
+ @apply -ml-px mr-1 h-4 w-4 text-primary
+ }
+ .file-component-variables-span {
+ @apply font-semibold text-primary
+ }
+ .file-component-variables-title {
+ @apply flex items-center justify-between pt-2
+ }
+ .file-component-variables-div {
+ @apply mr-2.5 flex items-center
+ }
+ .file-component-variables-title-txt {
+ @apply text-sm font-medium text-primary
+ }
+ .file-component-accordion-div {
+ @apply flex items-start gap-3
+ }
+ .file-component-badge-div {
+ @apply flex-max-width items-center justify-between
+ }
+ .file-component-tab-column {
+ @apply flex flex-col gap-2 p-1
+ }
+ .tab-accordion-badge-div {
+ @apply flex flex-1 items-center justify-between py-4 text-sm font-normal text-muted-foreground transition-all
+ }
+ .eraser-column-arrangement {
+ @apply flex-max-width flex-1 flex-col
+ }
+ .eraser-size {
+ @apply relative flex h-full w-full flex-col rounded-md border bg-muted
+ }
+ .eraser-position {
+ @apply absolute right-3 top-3 z-50
+ }
+ .chat-message-div {
+ @apply flex-max-width h-full flex-col items-center overflow-scroll scrollbar-hide
+ }
+ .chat-alert-box {
+ @apply flex-max-width h-full flex-col items-center justify-center text-center align-middle
+ }
+ .langflow-chat-span {
+ @apply text-lg text-foreground
+ }
+ .langflow-chat-desc {
+ @apply w-2/4 rounded-md border border-border bg-muted px-6 py-8
+ }
+ .langflow-chat-desc-span {
+ @apply text-base text-muted-foreground
+ }
+ .langflow-chat-input-div {
+ @apply flex-max-width flex-col items-center justify-between px-8 pb-6
+ }
+ .langflow-chat-input {
+ @apply relative w-full rounded-md shadow-sm
+ }
-}
\ No newline at end of file
+ .tooltip-fixed-width{
+ @apply max-w-[30vw] max-h-[20vh] overflow-auto
+ }
+
+ .ace-editor-arrangement {
+ @apply flex-max-width h-full flex-col transition-all
+ }
+ .ace-editor {
+ @apply h-full w-full rounded-lg border-[1px] border-border custom-scroll
+ }
+ .ace-editor-save-btn {
+ @apply flex-max-width h-fit justify-end
+ }
+
+ .export-modal-save-api {
+ @apply font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70
+ }
+
+ .chat-message-highlight {
+ @apply px-0.5 rounded-md bg-indigo-100 dark:bg-indigo-900
+ }
+
+
+}
diff --git a/src/frontend/src/modals/ApiModal/index.tsx b/src/frontend/src/modals/ApiModal/index.tsx
index ea802cbad..f3f84a9ac 100644
--- a/src/frontend/src/modals/ApiModal/index.tsx
+++ b/src/frontend/src/modals/ApiModal/index.tsx
@@ -142,7 +142,8 @@ export default function ApiModal({ flow }: { flow: FlowType }) {
}
function startTweaks() {
- tweak?.current?.push(buildTweaks(flow));
+ const t = buildTweaks(flow);
+ tweak?.current?.push(t);
}
function filterNodes() {
@@ -266,7 +267,7 @@ export default function ApiModal({ flow }: { flow: FlowType }) {
return (
-
+
Code
@@ -382,10 +383,10 @@ export default function ApiModal({ flow }: { flow: FlowType }) {
key={i}
className="h-10 dark:border-b-muted"
>
-
+
{n}
-
+
{t.data.node.template[n]
.type === "str" &&
diff --git a/src/frontend/src/modals/EditNodeModal/index.tsx b/src/frontend/src/modals/EditNodeModal/index.tsx
index 2a5a686a0..7f614dd16 100644
--- a/src/frontend/src/modals/EditNodeModal/index.tsx
+++ b/src/frontend/src/modals/EditNodeModal/index.tsx
@@ -1,5 +1,5 @@
import { Variable } from "lucide-react";
-import { useContext, useEffect, useRef, useState } from "react";
+import { useContext, useRef, useState } from "react";
import CodeAreaComponent from "../../components/codeAreaComponent";
import Dropdown from "../../components/dropdownComponent";
import FloatComponent from "../../components/floatComponent";
@@ -30,6 +30,7 @@ import {
TableRow,
} from "../../components/ui/table";
import { PopUpContext } from "../../contexts/popUpContext";
+import { TabsContext } from "../../contexts/tabsContext";
import { typesContext } from "../../contexts/typesContext";
import { NodeDataType } from "../../types/flow";
import { classNames, limitScrollFieldsModal } from "../../utils";
@@ -54,7 +55,12 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
const { closePopUp } = useContext(PopUpContext);
const { types } = useContext(typesContext);
const ref = useRef();
- const [enabled, setEnabled] = useState(null);
+ const { setTabsState, tabId } = useContext(TabsContext);
+ const { reactFlowInstance } = useContext(typesContext);
+
+ let disabled =
+ reactFlowInstance?.getEdges().some((e) => e.targetHandle === data.id) ??
+ false;
if (nodeLength == 0) {
closePopUp();
}
@@ -66,18 +72,30 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
}
}
- useEffect(() => {}, [closePopUp, data.node.template]);
-
- function changeAdvanced(node): void {
- Object.keys(data.node.template).filter((n, i) => {
+ function changeAdvanced(node) {
+ Object.keys(data.node.template).map((n, i) => {
if (n === node.name) {
data.node.template[n].advanced = !data.node.template[n].advanced;
}
- return true;
+ return n;
});
setNodeValue(!nodeValue);
}
+ const handleOnNewValue = (newValue: any, name) => {
+ data.node.template[name].value = newValue;
+ // Set state to pending
+ setTabsState((prev) => {
+ return {
+ ...prev,
+ [tabId]: {
+ ...prev[tabId],
+ isPending: true,
+ },
+ };
+ });
+ };
+
return (
@@ -87,11 +105,13 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
{data.type}
ID: {data.id}
-
- {data.node?.description}
-
-
-
Parameters
+
+
+ {data.node?.description}
+
+
+ Parameters
+
@@ -145,7 +165,7 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
{data.node.template[n].list ? (
{
- data.node.template[n].value = t;
+ handleOnNewValue(t, n);
}}
/>
) : data.node.template[n].multiline ? (
{
- data.node.template[n].value = t;
+ handleOnNewValue(t, n);
}}
/>
) : (
{
- data.node.template[n].value = t;
+ handleOnNewValue(t, n);
}}
/>
)}
@@ -183,19 +203,18 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
{" "}
{
- data.node.template[n].value = e;
- setEnabled(e);
+ setEnabled={(t) => {
+ handleOnNewValue(t, n);
}}
size="small"
- disabled={false}
/>
) : data.node.template[n].type === "float" ? (
{
@@ -210,9 +229,7 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
numberOfOptions={nodeLength}
editNode={true}
options={data.node.template[n].options}
- onSelect={(newValue) =>
- (data.node.template[n].value = newValue)
- }
+ onSelect={(t) => handleOnNewValue(t, n)}
value={
data.node.template[n].value ??
"Choose an option"
@@ -222,11 +239,11 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
) : data.node.template[n].type === "int" ? (
{
- data.node.template[n].value = t;
+ handleOnNewValue(t, n);
}}
/>
@@ -234,15 +251,15 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
{
- data.node.template[n].value = t;
+ handleOnNewValue(t, n);
}}
fileTypes={data.node.template[n].fileTypes}
suffixes={data.node.template[n].suffixes}
onFileChange={(t: string) => {
- data.node.template[n].content = t;
+ handleOnNewValue(t, n);
}}
>
@@ -251,10 +268,14 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
{
+ data.node = nodeClass;
+ }}
value={data.node.template[n].value ?? ""}
onChange={(t: string) => {
- data.node.template[n].value = t;
+ handleOnNewValue(t, n);
}}
/>
@@ -272,7 +293,7 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
editNode={true}
value={data.node.template[n].value ?? ""}
onChange={(t: string) => {
- data.node.template[n].value = t;
+ handleOnNewValue(t, n);
}}
/>
@@ -289,7 +310,7 @@ export default function EditNodeModal({ data }: { data: NodeDataType }) {
setEnabled={(e) =>
changeAdvanced(data.node.template[n])
}
- disabled={false}
+ disabled={disabled}
size="small"
/>
diff --git a/src/frontend/src/modals/NodeModal/components/ModalField/index.tsx b/src/frontend/src/modals/NodeModal/components/ModalField/index.tsx
index db63adc39..c642d0e07 100644
--- a/src/frontend/src/modals/NodeModal/components/ModalField/index.tsx
+++ b/src/frontend/src/modals/NodeModal/components/ModalField/index.tsx
@@ -52,7 +52,7 @@ export default function ModalField({
{display && (
{title}
- {required ? " *" : ""}
+ {required ? " *" : ""}
)}
diff --git a/src/frontend/src/modals/baseModal/index.tsx b/src/frontend/src/modals/baseModal/index.tsx
index 13893d8c1..4b135383c 100644
--- a/src/frontend/src/modals/baseModal/index.tsx
+++ b/src/frontend/src/modals/baseModal/index.tsx
@@ -54,7 +54,7 @@ function BaseModal({ open, setOpen, children }: BaseModalProps) {
);
//UPDATE COLORS AND STYLE CLASSSES
return (
-
+
{headerChild}
diff --git a/src/frontend/src/modals/chatModal/chatMessage/index.tsx b/src/frontend/src/modals/chatModal/chatMessage/index.tsx
deleted file mode 100644
index 5e130015a..000000000
--- a/src/frontend/src/modals/chatModal/chatMessage/index.tsx
+++ /dev/null
@@ -1,157 +0,0 @@
-import Convert from "ansi-to-html";
-import DOMPurify from "dompurify";
-import { MessageCircle, User2 } from "lucide-react";
-import { useEffect, useRef, useState } from "react";
-import ReactMarkdown from "react-markdown";
-import rehypeMathjax from "rehype-mathjax";
-import remarkGfm from "remark-gfm";
-import remarkMath from "remark-math";
-import AiIcon from "../../../assets/Gooey Ring-5s-271px.svg";
-import AiIconStill from "../../../assets/froze-flow.png";
-import { ChatMessageType } from "../../../types/chat";
-import { classNames } from "../../../utils";
-import FileCard from "../fileComponent";
-import { CodeBlock } from "./codeBlock";
-export default function ChatMessage({
- chat,
- lockChat,
- lastMessage,
-}: {
- chat: ChatMessageType;
- lockChat: boolean;
- lastMessage: boolean;
-}) {
- const convert = new Convert({ newline: true });
- const [message, setMessage] = useState("");
- const imgRef = useRef(null);
- useEffect(() => {
- setMessage(chat.message);
- }, [chat.message]);
- const [hidden, setHidden] = useState(true);
- return (
-
-
- {!chat.isSend && (
-
-
-
-
- )}
- {chat.isSend &&
}
-
- {!chat.isSend ? (
-
-
- {hidden && chat.thought && chat.thought !== "" && (
-
setHidden((prev) => !prev)}
- className="chat-message-modal-icon-div"
- >
-
-
- )}
- {chat.thought && chat.thought !== "" && !hidden && (
-
setHidden((prev) => !prev)}
- className=" chat-message-modal-thought"
- dangerouslySetInnerHTML={{
- __html: DOMPurify.sanitize(convert.toHtml(chat.thought)),
- }}
- >
- )}
- {chat.thought && chat.thought !== "" && !hidden &&
}
-
-
-
-
- ▍
-
- );
- }
-
- children[0] = (children[0] as string).replace(
- "`▍`",
- "▍"
- );
- }
-
- const match = /language-(\w+)/.exec(className || "");
-
- return !inline ? (
-
- ) : (
-
- {children}
-
- );
- },
- }}
- >
- {message}
-
-
- {chat.files && (
-
- {chat.files.map((file, index) => {
- return (
-
-
-
- );
- })}
-
- )}
-
-
-
-
- ) : (
-
-
- {message.split("\n").map((line, index) => (
-
- {line}
-
-
- ))}
-
-
- )}
-
- );
-}
diff --git a/src/frontend/src/modals/chatModal/index.tsx b/src/frontend/src/modals/chatModal/index.tsx
deleted file mode 100644
index 5cbc2d17b..000000000
--- a/src/frontend/src/modals/chatModal/index.tsx
+++ /dev/null
@@ -1,415 +0,0 @@
-import { Dialog, Transition } from "@headlessui/react";
-import { Eraser, MessagesSquare, X } from "lucide-react";
-import { Fragment, useContext, useEffect, useRef, useState } from "react";
-import { alertContext } from "../../contexts/alertContext";
-import { typesContext } from "../../contexts/typesContext";
-import { sendAllProps } from "../../types/api";
-import { ChatMessageType } from "../../types/chat";
-import { FlowType } from "../../types/flow";
-import { validateNodes } from "../../utils";
-import ChatInput from "./chatInput";
-import ChatMessage from "./chatMessage";
-
-import _ from "lodash";
-import { getHealth } from "../../controllers/API";
-
-export default function ChatModal({
- flow,
- open,
- setOpen,
-}: {
- open: boolean;
- setOpen: Function;
- flow: FlowType;
-}) {
- const [chatValue, setChatValue] = useState("");
- const [chatHistory, setChatHistory] = useState([]);
- const { reactFlowInstance } = useContext(typesContext);
- const { setErrorData, setNoticeData } = useContext(alertContext);
- const ws = useRef(null);
- const [lockChat, setLockChat] = useState(false);
- const isOpen = useRef(open);
- const messagesRef = useRef(null);
- const id = useRef(flow.id);
-
- useEffect(() => {
- if (messagesRef.current) {
- messagesRef.current.scrollTop = messagesRef.current.scrollHeight;
- }
- }, [chatHistory]);
-
- useEffect(() => {
- isOpen.current = open;
- }, [open]);
- useEffect(() => {
- id.current = flow.id;
- }, [flow.id]);
-
- var isStream = false;
-
- const addChatHistory = (
- message: string,
- isSend: boolean,
- thought?: string,
- files?: Array
- ) => {
- setChatHistory((old) => {
- let newChat = _.cloneDeep(old);
- if (files) {
- newChat.push({ message, isSend, files, thought });
- } else if (thought) {
- newChat.push({ message, isSend, thought });
- } else {
- newChat.push({ message, isSend });
- }
- return newChat;
- });
- };
-
- //add proper type signature for function
-
- function updateLastMessage({
- str,
- thought,
- end = false,
- files,
- }: {
- str?: string;
- thought?: string;
- // end param default is false
- end?: boolean;
- files?: Array;
- }) {
- setChatHistory((old) => {
- let newChat = [...old];
- if (str) {
- if (end) {
- newChat[newChat.length - 1].message = str;
- } else {
- newChat[newChat.length - 1].message =
- newChat[newChat.length - 1].message + str;
- }
- }
- if (thought) {
- newChat[newChat.length - 1].thought = thought;
- }
- if (files) {
- newChat[newChat.length - 1].files = files;
- }
- return newChat;
- });
- }
-
- function handleOnClose(event: CloseEvent) {
- if (isOpen.current) {
- setErrorData({ title: event.reason });
- setTimeout(() => {
- connectWS();
- setLockChat(false);
- }, 1000);
- }
- }
-
- function getWebSocketUrl(chatId, isDevelopment = false) {
- const isSecureProtocol = window.location.protocol === "https:";
- const webSocketProtocol = isSecureProtocol ? "wss" : "ws";
- const host = isDevelopment ? "localhost:7860" : window.location.host;
- const chatEndpoint = `/api/v1/chat/${chatId}`;
-
- return `${
- isDevelopment ? "ws" : webSocketProtocol
- }://${host}${chatEndpoint}`;
- }
-
- function handleWsMessage(data: any) {
- if (Array.isArray(data)) {
- //set chat history
- setChatHistory((_) => {
- let newChatHistory: ChatMessageType[] = [];
- data.forEach(
- (chatItem: {
- intermediate_steps?: "string";
- is_bot: boolean;
- message: string;
- type: string;
- files?: Array;
- }) => {
- if (chatItem.message) {
- newChatHistory.push(
- chatItem.files
- ? {
- isSend: !chatItem.is_bot,
- message: chatItem.message,
- thought: chatItem.intermediate_steps,
- files: chatItem.files,
- }
- : {
- isSend: !chatItem.is_bot,
- message: chatItem.message,
- thought: chatItem.intermediate_steps,
- }
- );
- }
- }
- );
- return newChatHistory;
- });
- }
- if (data.type === "start") {
- addChatHistory("", false);
- isStream = true;
- }
- if (data.type === "end") {
- if (data.message) {
- updateLastMessage({ str: data.message, end: true });
- }
- if (data.intermediate_steps) {
- updateLastMessage({
- str: data.message,
- thought: data.intermediate_steps,
- end: true,
- });
- }
- if (data.files) {
- updateLastMessage({
- end: true,
- files: data.files,
- });
- }
-
- setLockChat(false);
- isStream = false;
- }
- if (data.type === "stream" && isStream) {
- updateLastMessage({ str: data.message });
- }
- }
-
- function connectWS() {
- try {
- const urlWs = getWebSocketUrl(
- id.current,
- process.env.NODE_ENV === "development"
- );
- const newWs = new WebSocket(urlWs);
- newWs.onopen = () => {
- console.log("WebSocket connection established!");
- };
- newWs.onmessage = (event) => {
- const data = JSON.parse(event.data);
- console.log("Received data:", data);
- handleWsMessage(data);
- //get chat history
- };
- newWs.onclose = (event) => {
- handleOnClose(event);
- };
- newWs.onerror = (ev) => {
- getHealth()
- .then((res) => {
- if (res.status === 200) {
- connectWS();
- }
- })
- .catch((err) => {
- setErrorData({
- // message when the backend failed
- title: "The backend is not responding. Please try again later.",
- // possible solution list
- list: [
- "Check your internet connection.",
- "Check if the backend is running.",
- ],
- });
- });
- };
- ws.current = newWs;
- } catch (error) {
- connectWS();
- console.log(error);
- }
- }
-
- useEffect(() => {
- connectWS();
- return () => {
- console.log("unmount");
- console.log(ws);
- if (ws.current) {
- ws.current.close();
- }
- };
- }, []);
-
- useEffect(() => {
- if (
- ws.current &&
- (ws.current.readyState === ws.current.CLOSED ||
- ws.current.readyState === ws.current.CLOSING)
- ) {
- connectWS();
- setLockChat(false);
- }
- }, [lockChat]);
-
- async function sendAll(data: sendAllProps) {
- try {
- if (ws) {
- ws.current.send(JSON.stringify(data));
- }
- } catch (error) {
- setErrorData({
- title: "There was an error sending the message",
- list: [error.message],
- });
- setChatValue(data.message);
- connectWS();
- }
- }
-
- useEffect(() => {
- if (ref.current) ref.current.scrollIntoView({ behavior: "smooth" });
- }, [chatHistory]);
-
- const ref = useRef(null);
-
- useEffect(() => {
- if (open && ref.current) {
- ref.current.focus();
- }
- }, [open]);
-
- function sendMessage() {
- if (chatValue !== "") {
- let nodeValidationErrors = validateNodes(reactFlowInstance);
- if (nodeValidationErrors.length === 0) {
- setLockChat(true);
- let message = chatValue;
- setChatValue("");
- addChatHistory(message, true);
- sendAll({
- ...reactFlowInstance.toObject(),
- message,
- chatHistory,
- name: flow.name,
- description: flow.description,
- });
- } else {
- setErrorData({
- title: "Oops! Looks like you missed some required information:",
- list: nodeValidationErrors,
- });
- }
- } else {
- setErrorData({
- title: "Error sending message",
- list: ["The message cannot be empty."],
- });
- }
- }
- function clearChat() {
- setChatHistory([]);
- ws.current.send(JSON.stringify({ clear_history: true }));
- if (lockChat) setLockChat(false);
- }
-
- function setModalOpen(x: boolean) {
- setOpen(x);
- }
- return (
-
-
-
-
-
-
-
-
-
-
-
- clearChat()}
- className="chat-modal-dialog-trash-panel"
- >
-
-
- setModalOpen(false)}
- className="chat-modal-dialog-x-panel"
- >
-
-
-
-
- {chatHistory.length > 0 ? (
- chatHistory.map((c, i) => (
-
- ))
- ) : (
-
-
- 👋{" "}
-
- Langflow Chat
-
-
-
-
-
- Start a conversation and click the agent’s thoughts{" "}
-
-
- {" "}
- to inspect the chaining process.
-
-
-
- )}
-
-
-
-
-
-
-
-
-
- );
-}
diff --git a/src/frontend/src/modals/codeAreaModal/index.tsx b/src/frontend/src/modals/codeAreaModal/index.tsx
index f310a219c..e48f1e4f9 100644
--- a/src/frontend/src/modals/codeAreaModal/index.tsx
+++ b/src/frontend/src/modals/codeAreaModal/index.tsx
@@ -10,16 +10,9 @@ import "ace-builds/src-noconflict/ext-language_tools";
import "ace-builds/src-noconflict/ace";
// import "ace-builds/webpack-resolver";
import { TerminalSquare } from "lucide-react";
+import { useContext, useState } from "react";
+import AceEditor from "react-ace";
import { Button } from "../../components/ui/button";
-import {
- Dialog,
- DialogContent,
- DialogDescription,
- DialogFooter,
- DialogHeader,
- DialogTitle,
- DialogTrigger,
-} from "../../components/ui/dialog";
import { CODE_PROMPT_DIALOG_SUBTITLE } from "../../constants";
import { alertContext } from "../../contexts/alertContext";
import { darkContext } from "../../contexts/darkContext";
@@ -45,9 +38,7 @@ export default function CodeAreaModal({
setNodeClass: (Class: APIClassType) => void;
dynamic?: boolean;
}) {
- const [open, setOpen] = useState(true);
const [code, setCode] = useState(value);
- const [loading, setLoading] = useState(false);
const { dark } = useContext(darkContext);
const { setErrorData, setSuccessData } = useContext(alertContext);
const [activeTab, setActiveTab] = useState("0");
@@ -55,14 +46,12 @@ export default function CodeAreaModal({
detail: { error: string; traceback: string };
}>(null);
const { closePopUp, setCloseEdit } = useContext(PopUpContext);
- const ref = useRef();
+ const { setErrorData, setSuccessData } = useContext(alertContext);
+
function setModalOpen(x: boolean) {
- setOpen(x);
if (x === false) {
- setTimeout(() => {
- setCloseEdit("editcode");
- closePopUp();
- }, 300);
+ setCloseEdit("codearea");
+ closePopUp();
}
}
console.log(dynamic);
diff --git a/src/frontend/src/modals/exportModal/index.tsx b/src/frontend/src/modals/exportModal/index.tsx
index f4a66c7b9..fb6867811 100644
--- a/src/frontend/src/modals/exportModal/index.tsx
+++ b/src/frontend/src/modals/exportModal/index.tsx
@@ -71,10 +71,7 @@ export default function ExportModal() {
setChecked(event);
}}
/>
-
+
Save with my API keys
diff --git a/src/frontend/src/modals/formModal/chatInput/index.tsx b/src/frontend/src/modals/formModal/chatInput/index.tsx
index 3b31b0d42..ec62c0505 100644
--- a/src/frontend/src/modals/formModal/chatInput/index.tsx
+++ b/src/frontend/src/modals/formModal/chatInput/index.tsx
@@ -1,4 +1,4 @@
-import { Lock, LucideSend } from "lucide-react";
+import { Lock, LucideSend, Sparkles } from "lucide-react";
import { useEffect } from "react";
import { classNames } from "../../../utils";
@@ -8,6 +8,7 @@ export default function ChatInput({
sendMessage,
setChatValue,
inputRef,
+ noInput,
}) {
useEffect(() => {
if (!lockChat && inputRef.current) {
@@ -32,7 +33,7 @@ export default function ChatInput({
}}
rows={1}
ref={inputRef}
- disabled={lockChat}
+ disabled={lockChat || noInput}
style={{
resize: "none",
bottom: `${inputRef?.current?.scrollHeight}px`,
@@ -49,31 +50,38 @@ export default function ChatInput({
}}
className={classNames(
lockChat
- ? " bg-input text-black dark:bg-gray-700 dark:text-gray-300"
- : " bg-white-200 text-black dark:bg-gray-900 dark:text-gray-300",
- "form-input block w-full rounded-md border-gray-300 p-4 pr-16 custom-scroll dark:border-gray-600 sm:text-sm"
+ ? " form-modal-lock-true bg-input"
+ : noInput
+ ? "form-modal-no-input bg-input"
+ : " form-modal-lock-false bg-background",
+
+ "form-modal-lockchat"
)}
- placeholder={"Send a message..."}
+ placeholder={
+ noInput
+ ? "No chat input variables found. Click to run your flow."
+ : "Send a message..."
+ }
/>
-
+
sendMessage()}
>
{lockChat ? (
-
+
+ ) : noInput ? (
+
) : (
-
+
)}
diff --git a/src/frontend/src/modals/formModal/chatMessage/codeBlock/index.tsx b/src/frontend/src/modals/formModal/chatMessage/codeBlock/index.tsx
index c78974a3c..884215ebb 100644
--- a/src/frontend/src/modals/formModal/chatMessage/codeBlock/index.tsx
+++ b/src/frontend/src/modals/formModal/chatMessage/codeBlock/index.tsx
@@ -1,5 +1,5 @@
import { IconCheck, IconClipboard, IconDownload } from "@tabler/icons-react";
-import { FC, memo, useState } from "react";
+import { useState } from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { oneDark } from "react-syntax-highlighter/dist/cjs/styles/prism";
import { programmingLanguages } from "../../../../utils";
@@ -9,7 +9,7 @@ interface Props {
value: string;
}
-export const CodeBlock: FC
= memo(({ language, value }) => {
+export function CodeBlock({ language, value }) {
const [isCopied, setIsCopied] = useState(false);
const copyToClipboard = () => {
@@ -63,7 +63,7 @@ export const CodeBlock: FC = memo(({ language, value }) => {
= memo(({ language, value }) => {
);
-});
+}
CodeBlock.displayName = "CodeBlock";
diff --git a/src/frontend/src/modals/formModal/chatMessage/index.tsx b/src/frontend/src/modals/formModal/chatMessage/index.tsx
index af3388596..8cbf0877c 100644
--- a/src/frontend/src/modals/formModal/chatMessage/index.tsx
+++ b/src/frontend/src/modals/formModal/chatMessage/index.tsx
@@ -1,18 +1,18 @@
import Convert from "ansi-to-html";
import { ChevronDown } from "lucide-react";
-import { useState } from "react";
+import { useMemo, useState } from "react";
import ReactMarkdown from "react-markdown";
import rehypeMathjax from "rehype-mathjax";
import remarkGfm from "remark-gfm";
import remarkMath from "remark-math";
import MaleTechnology from "../../../assets/male-technologist.png";
import Robot from "../../../assets/robot.png";
+import SanitizedHTMLWrapper from "../../../components/SanitizedHTMLWrapper";
import { THOUGHTS_ICON } from "../../../constants";
import { ChatMessageType } from "../../../types/chat";
import { classNames } from "../../../utils";
import FileCard from "../fileComponent";
import { CodeBlock } from "./codeBlock";
-
export default function ChatMessage({
chat,
lockChat,
@@ -24,96 +24,111 @@ export default function ChatMessage({
}) {
const convert = new Convert({ newline: true });
const [hidden, setHidden] = useState(true);
- const [template, setTemplate] = useState(chat.template);
+ const template = chat.template;
const [promptOpen, setPromptOpen] = useState(false);
return (
-
+
{!chat.isSend ? (
-
-
-
+
+
+
) : (
-
-
-
+
+
+
)}
{!chat.isSend ? (
-
-
+
+
{hidden && chat.thought && chat.thought !== "" && (
setHidden((prev) => !prev)}
- className="absolute -left-8 -top-3 cursor-pointer"
+ className="form-modal-chat-icon-div"
>
-
+
)}
{chat.thought && chat.thought !== "" && !hidden && (
-
setHidden((prev) => !prev)}
- className=" ml-3 inline-block h-full w-[95%] cursor-pointer overflow-scroll rounded-md border
- border-gray-300 bg-muted px-2 text-start text-primary scrollbar-hide dark:border-gray-500 dark:bg-gray-800"
- dangerouslySetInnerHTML={{
- __html: convert.toHtml(chat.thought),
- }}
- >
+ />
)}
{chat.thought && chat.thought !== "" && !hidden &&
}
-
- ▍
-
+ {useMemo(
+ () => (
+ {
+ if (children.length) {
+ if (children[0] === "▍") {
+ return (
+
+ ▍
+
+ );
+ }
+
+ children[0] = (children[0] as string).replace(
+ "`▍`",
+ "▍"
+ );
+ }
+
+ const match = /language-(\w+)/.exec(
+ className || ""
);
- }
- children[0] = (children[0] as string).replace(
- "`▍`",
- "▍"
- );
- }
-
- const match = /language-(\w+)/.exec(className || "");
-
- return !inline ? (
-
- ) : (
-
- {children}
-
- );
- },
- }}
- >
- {chat.message.toString()}
-
+ return !inline ? (
+
+ ) : (
+
+ {children}
+
+ );
+ },
+ }}
+ >
+ {chat.message.toString()}
+
+ ),
+ [chat.message, chat.message.toString()]
+ )}
{chat.files && (
@@ -136,51 +151,57 @@ export default function ChatMessage({
) : (
-
{
- setPromptOpen((old) => !old);
- }}
- >
- Initial Prompt
-
-
-
- {promptOpen
- ? template.split("\n").map((line, index) => {
- const regex = /{([^}]+)}/g;
- let match;
- let parts = [];
- let lastIndex = 0;
- while ((match = regex.exec(line)) !== null) {
- // Push text up to the match
- if (match.index !== lastIndex) {
- parts.push(line.substring(lastIndex, match.index));
- }
- // Push div with matched text
- if (chat.message[match[1]]) {
- parts.push(
-
- {chat.message[match[1]]}
-
- );
- }
+ {template ? (
+ <>
+ {
+ setPromptOpen((old) => !old);
+ }}
+ >
+ Display Prompt
+
+
+
+ {promptOpen
+ ? template?.split("\n")?.map((line, index) => {
+ const regex = /{([^}]+)}/g;
+ let match;
+ let parts = [];
+ let lastIndex = 0;
+ while ((match = regex.exec(line)) !== null) {
+ // Push text up to the match
+ if (match.index !== lastIndex) {
+ parts.push(line.substring(lastIndex, match.index));
+ }
+ // Push div with matched text
+ if (chat.message[match[1]]) {
+ parts.push(
+
+ {chat.message[match[1]]}
+
+ );
+ }
- // Update last index
- lastIndex = regex.lastIndex;
- }
- // Push text after the last match
- if (lastIndex !== line.length) {
- parts.push(line.substring(lastIndex));
- }
- return {parts}
;
- })
- : chat.message[chat.chatKey]}
-
+ // Update last index
+ lastIndex = regex.lastIndex;
+ }
+ // Push text after the last match
+ if (lastIndex !== line.length) {
+ parts.push(line.substring(lastIndex));
+ }
+ return {parts}
;
+ })
+ : chat.message[chat.chatKey]}
+
+ >
+ ) : (
+
{chat.message[chat.chatKey]}
+ )}
)}
diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx
index 0168708ec..45c101f3f 100644
--- a/src/frontend/src/modals/formModal/index.tsx
+++ b/src/frontend/src/modals/formModal/index.tsx
@@ -50,7 +50,7 @@ export default function FormModal({
const handleKeys = formKeysData.handle_keys;
const keyToUse = Object.keys(inputKeys).find(
- (k) => !handleKeys.some((j) => j === k)
+ (k) => !handleKeys.some((j) => j === k) && inputKeys[k] === ""
);
return inputKeys[keyToUse];
@@ -63,15 +63,19 @@ export default function FormModal({
const [chatHistory, setChatHistory] = useState
([]);
const { reactFlowInstance } = useContext(typesContext);
- const { setErrorData, setNoticeData } = useContext(alertContext);
+ const { setErrorData } = useContext(alertContext);
const ws = useRef(null);
const [lockChat, setLockChat] = useState(false);
const isOpen = useRef(open);
const messagesRef = useRef(null);
const id = useRef(flow.id);
+ const tabsStateFlowId = tabsState[flow.id];
+ const tabsStateFlowIdFormKeysData = tabsStateFlowId.formKeysData;
const [chatKey, setChatKey] = useState(
Object.keys(tabsState[flow.id].formKeysData.input_keys).find(
- (k) => !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k)
+ (k) =>
+ !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) &&
+ tabsState[flow.id].formKeysData.input_keys[k] === ""
)
);
@@ -86,7 +90,7 @@ export default function FormModal({
}, [open]);
useEffect(() => {
id.current = flow.id;
- }, [flow.id, tabsState[flow.id], tabsState[flow.id].formKeysData]);
+ }, [flow.id, tabsStateFlowId, tabsStateFlowIdFormKeysData]);
var isStream = false;
@@ -290,6 +294,7 @@ export default function FormModal({
ws.current.close();
}
};
+ // do not add connectWS on dependencies array
}, []);
useEffect(() => {
@@ -301,6 +306,7 @@ export default function FormModal({
connectWS();
setLockChat(false);
}
+ // do not add connectWS on dependencies array
}, [lockChat]);
async function sendAll(data: sendAllProps) {
@@ -331,41 +337,35 @@ export default function FormModal({
}, [open]);
function sendMessage() {
- if (chatValue !== "") {
- let nodeValidationErrors = validateNodes(reactFlowInstance);
- if (nodeValidationErrors.length === 0) {
- setLockChat(true);
- let inputs = tabsState[id.current].formKeysData.input_keys;
- setChatValue("");
- const message = inputs;
- addChatHistory(
- message,
- true,
- chatKey,
- tabsState[flow.id].formKeysData.template
- );
- sendAll({
- ...reactFlowInstance.toObject(),
- inputs: inputs,
- chatHistory,
- name: flow.name,
- description: flow.description,
- });
- setTabsState((old) => {
- let newTabsState = _.cloneDeep(old);
- newTabsState[id.current].formKeysData.input_keys[chatKey] = "";
- return newTabsState;
- });
- } else {
- setErrorData({
- title: "Oops! Looks like you missed some required information:",
- list: nodeValidationErrors,
- });
- }
+ let nodeValidationErrors = validateNodes(reactFlowInstance);
+ if (nodeValidationErrors.length === 0) {
+ setLockChat(true);
+ let inputs = tabsState[id.current].formKeysData.input_keys;
+ setChatValue("");
+ const message = inputs;
+ addChatHistory(
+ message,
+ true,
+ chatKey,
+ tabsState[flow.id].formKeysData.template
+ );
+ sendAll({
+ ...reactFlowInstance.toObject(),
+ inputs: inputs,
+ chatHistory,
+ name: flow.name,
+ description: flow.description,
+ });
+ setTabsState((old) => {
+ if (!chatKey) return old;
+ let newTabsState = _.cloneDeep(old);
+ newTabsState[id.current].formKeysData.input_keys[chatKey] = "";
+ return newTabsState;
+ });
} else {
setErrorData({
- title: "Error sending message",
- list: ["The message cannot be empty."],
+ title: "Oops! Looks like you missed some required information:",
+ list: nodeValidationErrors,
});
}
}
@@ -383,6 +383,9 @@ export default function FormModal({
if (checked === true) {
setChatKey(i);
setChatValue(tabsState[flow.id].formKeysData.input_keys[i]);
+ } else {
+ setChatKey(null);
+ setChatValue("");
}
}
return (
@@ -401,19 +404,19 @@ export default function FormModal({
{CHAT_FORM_DIALOG_SUBTITLE}
-
-
-
-
-
+
+
+
+
+
Input Variables
-
-
+
+
Name
-
+
Chat Input
@@ -422,10 +425,10 @@ export default function FormModal({
{Object.keys(tabsState[id.current].formKeysData.input_keys).map(
(i, k) => (
-
+
-
+
{i}
@@ -450,7 +453,7 @@ export default function FormModal({
-
+
{tabsState[
id.current
].formKeysData.handle_keys.some((t) => t === i) && (
@@ -482,21 +485,21 @@ export default function FormModal({
)}
{tabsState[id.current].formKeysData.memory_keys.map((i, k) => (
-
+
{i}
- Used as Memory Key
+ Used as memory key
))}
-
-
-
+
+
+
clearChat()}>
-
+
{chatHistory.length > 0 ? (
chatHistory.map((c, i) => (
))
) : (
-
+
👋{" "}
-
+
LangFlow Chat
-
-
+
+
Start a conversation and click the agent's thoughts{" "}
@@ -544,10 +546,11 @@ export default function FormModal({
)}
-
-
+
+
{
diff --git a/src/frontend/src/modals/genericModal/index.tsx b/src/frontend/src/modals/genericModal/index.tsx
index ba3fb672f..b2cf02fdb 100644
--- a/src/frontend/src/modals/genericModal/index.tsx
+++ b/src/frontend/src/modals/genericModal/index.tsx
@@ -1,24 +1,12 @@
-import DOMPurify from "dompurify";
import { FileText, Variable } from "lucide-react";
import { useContext, useEffect, useRef, useState } from "react";
+import SanitizedHTMLWrapper from "../../components/SanitizedHTMLWrapper";
import ShadTooltip from "../../components/ShadTooltipComponent";
import { Badge } from "../../components/ui/badge";
import { Button } from "../../components/ui/button";
-import {
- Dialog,
- DialogContent,
- DialogDescription,
- DialogFooter,
- DialogHeader,
- DialogTitle,
- DialogTrigger,
-} from "../../components/ui/dialog";
+import { DialogTitle } from "../../components/ui/dialog";
import { Textarea } from "../../components/ui/textarea";
-import {
- HIGHLIGH_CSS,
- PROMPT_DIALOG_SUBTITLE,
- TEXT_DIALOG_SUBTITLE,
-} from "../../constants";
+import { PROMPT_DIALOG_SUBTITLE, TEXT_DIALOG_SUBTITLE } from "../../constants";
import { alertContext } from "../../contexts/alertContext";
import { darkContext } from "../../contexts/darkContext";
import { PopUpContext } from "../../contexts/popUpContext";
@@ -32,6 +20,7 @@ import {
regexHighlight,
varHighlightHTML,
} from "../../utils";
+import BaseModal from "../baseModal";
export default function GenericModal({
field_name = "",
@@ -57,7 +46,6 @@ export default function GenericModal({
const [myModalType] = useState(type);
const [inputValue, setInputValue] = useState(value);
const [isEdit, setIsEdit] = useState(true);
- const [wordsHighlightInvalid, setWordsHighlightInvalid] = useState([]);
const [wordsHighlight, setWordsHighlight] = useState([]);
const { dark } = useContext(darkContext);
const { setErrorData, setSuccessData, setNoticeData } =
@@ -102,15 +90,14 @@ export default function GenericModal({
(word) => !invalid_chars.includes(word)
);
- setWordsHighlightInvalid(invalid_chars);
setWordsHighlight(filteredWordsHighlight);
}
useEffect(() => {
- if (type == TypeModal.PROMPT && inputValue && inputValue != "") {
+ if (type === TypeModal.PROMPT && inputValue && inputValue != "") {
checkVariables(inputValue);
}
- }, []);
+ }, [inputValue, type]);
const coloredContent = (inputValue || "")
.replace(/ {
return (
- {
setIsEdit(true);
}}
+ suppressWarning={true}
/>
);
};
@@ -135,10 +122,9 @@ export default function GenericModal({
postValidatePrompt(field_name, inputValue, nodeClass)
.then((apiReturn) => {
if (apiReturn.data) {
- setNodeClass(apiReturn.data.frontend_node);
-
- let inputVariables = apiReturn.data.input_variables;
- if (inputVariables.length === 0) {
+ setNodeClass(apiReturn.data?.frontend_node);
+ let inputVariables = apiReturn.data.input_variables ?? [];
+ if (inputVariables && inputVariables.length === 0) {
setIsEdit(true);
setNoticeData({
title: "Your template does not have any variables.",
@@ -159,143 +145,147 @@ export default function GenericModal({
}
})
.catch((error) => {
+ console.log(error);
setIsEdit(true);
return setErrorData({
title: "There is something wrong with this prompt, please review it",
- list: [error.response.data.detail],
+ list: [error?.response?.data?.detail],
});
});
}
return (
-
-
-
-
-
- {myModalTitle}
-
-
-
- {(() => {
- switch (myModalTitle) {
- case "Edit Text":
- return TEXT_DIALOG_SUBTITLE;
+
+ {
+ switch (myModalTitle) {
+ case "Edit Text":
+ return TEXT_DIALOG_SUBTITLE;
- case "Edit Prompt":
- return PROMPT_DIALOG_SUBTITLE;
+ case "Edit Prompt":
+ return PROMPT_DIALOG_SUBTITLE;
- default:
- return null;
- }
- })()}
-
-
-
-
- {type == TypeModal.PROMPT && isEdit ? (
-
-
- {type == TypeModal.PROMPT && (
- <>
-
-
-
-
- Input Variables:
-
-
- {wordsHighlight.map((word, index) => (
-
-
-
-
- {word.replace(/[{}]/g, "").length > 59
- ? word.replace(/[{}]/g, "").slice(0, 56) + "..."
- : word.replace(/[{}]/g, "")}
-
-
-
-
- ))}
-
-
- >
- )}
-
-
- {
- switch (myModalType) {
- case 1:
- setValue(inputValue);
- setModalOpen(false);
- break;
- case 2:
- !inputValue || inputValue == ""
- ? setModalOpen(false)
- : validatePrompt(false);
- break;
-
- default:
- break;
- }
- }}
- type="submit"
+ default:
+ return null;
+ }
+ })()}
+ >
+
+ {myModalTitle}
+
+
+
+
+
+
- {myButtonText}
-
-
-
-
+ {type === TypeModal.PROMPT && isEdit ? (
+ {
+ setIsEdit(false);
+ }}
+ autoFocus
+ onChange={(e) => {
+ setInputValue(e.target.value);
+ checkVariables(e.target.value);
+ }}
+ placeholder="Type message here."
+ />
+ ) : type === TypeModal.PROMPT && !isEdit ? (
+
+ ) : type !== TypeModal.PROMPT ? (
+ {
+ setInputValue(e.target.value);
+ }}
+ placeholder="Type message here."
+ />
+ ) : (
+ <>>
+ )}
+
+
+
+
+ {type === TypeModal.PROMPT && (
+
+
+
+
+
+ Prompt Variables:
+
+
+ {wordsHighlight.map((word, index) => (
+
+
+
+
+ {word.replace(/[{}]/g, "").length > 59
+ ? word.replace(/[{}]/g, "").slice(0, 56) +
+ "..."
+ : word.replace(/[{}]/g, "")}
+
+
+
+
+ ))}
+
+
+
+ Prompt variables can be created with any chosen name inside
+ curly brackets, e.g. {"{variable_name}"}
+
+
+ )}
+
+
{
+ switch (myModalType) {
+ case 1:
+ setValue(inputValue);
+ setModalOpen(false);
+ break;
+ case 2:
+ !inputValue || inputValue === ""
+ ? setModalOpen(false)
+ : validatePrompt(false);
+ break;
+
+ default:
+ break;
+ }
+ }}
+ type="submit"
+ >
+ {myButtonText}
+
+
+
+
+
);
}
diff --git a/src/frontend/src/modals/promptModal/index.tsx b/src/frontend/src/modals/promptModal/index.tsx
deleted file mode 100644
index 54fd46528..000000000
--- a/src/frontend/src/modals/promptModal/index.tsx
+++ /dev/null
@@ -1,151 +0,0 @@
-import { Dialog, Transition } from "@headlessui/react";
-import { DocumentTextIcon, XMarkIcon } from "@heroicons/react/24/outline";
-import { Fragment, useContext, useRef, useState } from "react";
-import { alertContext } from "../../contexts/alertContext";
-import { darkContext } from "../../contexts/darkContext";
-import { PopUpContext } from "../../contexts/popUpContext";
-import { checkPrompt } from "../../controllers/API";
-export default function PromptAreaModal({
- value,
- setValue,
-}: {
- setValue: (value: string) => void;
- value: string;
-}) {
- const [open, setOpen] = useState(true);
- const [myValue, setMyValue] = useState(value);
- const { dark } = useContext(darkContext);
- const { setErrorData, setSuccessData } = useContext(alertContext);
- const { closePopUp, setCloseEdit } = useContext(PopUpContext);
- const ref = useRef();
- function setModalOpen(x: boolean) {
- setOpen(x);
- if (x === false) {
- setTimeout(() => {
- setCloseEdit("prompt");
- closePopUp();
- }, 300);
- }
- }
- return (
-
-
-
-
-
-
-
-
-
-
-
- {
- setModalOpen(false);
- }}
- >
- Close
-
-
-
-
-
-
-
-
-
-
- Edit Prompt
-
-
-
-
-
-
- {
- setMyValue(e.target.value);
- setValue(e.target.value);
- }}
- />
-
-
-
-
- {
- checkPrompt(myValue)
- .then((apiReturn) => {
- if (apiReturn.data) {
- let inputVariables =
- apiReturn.data.input_variables;
- if (inputVariables.length === 0) {
- setErrorData({
- title:
- "The template you are attempting to use does not contain any variables for data entry.",
- });
- } else {
- setSuccessData({
- title: "Prompt is ready",
- });
- setModalOpen(false);
- setValue(myValue);
- }
- } else {
- setErrorData({
- title: "Something went wrong, please try again",
- });
- }
- })
- .catch((error) => {
- return setErrorData({
- title:
- "There is something wrong with this prompt, please review it",
- list: [error.response.data.detail],
- });
- });
- }}
- >
- Check & Save
-
-
-
-
-
-
-
-
-
- );
-}
diff --git a/src/frontend/src/modals/textAreaModal/index.tsx b/src/frontend/src/modals/textAreaModal/index.tsx
deleted file mode 100644
index 83186bbea..000000000
--- a/src/frontend/src/modals/textAreaModal/index.tsx
+++ /dev/null
@@ -1,121 +0,0 @@
-import { Dialog, Transition } from "@headlessui/react";
-import {
- ClipboardDocumentListIcon,
- XMarkIcon,
-} from "@heroicons/react/24/outline";
-import { Fragment, useContext, useRef, useState } from "react";
-import { PopUpContext } from "../../contexts/popUpContext";
-
-export default function TextAreaModal({
- value,
- setValue,
-}: {
- setValue: (value: string) => void;
- value: string | string[];
-}) {
- const [open, setOpen] = useState(true);
- const [myValue, setMyValue] = useState(value);
- const { closePopUp, setCloseEdit } = useContext(PopUpContext);
- const ref = useRef();
- function setModalOpen(x: boolean) {
- setOpen(x);
- if (x === false) {
- setTimeout(() => {
- setCloseEdit("textarea");
- closePopUp();
- }, 300);
- }
- }
- return (
-
-
-
-
-
-
-
-
-
-
-
- {
- setModalOpen(false);
- }}
- >
- Close
-
-
-
-
-
-
-
-
-
-
- Edit text
-
-
-
-
-
-
- {
- setMyValue(e.target.value);
- setValue(e.target.value);
- }}
- />
-
-
-
-
- {
- setModalOpen(false);
- }}
- >
- Finish editing
-
-
-
-
-
-
-
-
-
- );
-}
diff --git a/src/frontend/src/pages/FlowPage/components/ConnectionLineComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/ConnectionLineComponent/index.tsx
index bc78bc5bd..61a493613 100644
--- a/src/frontend/src/pages/FlowPage/components/ConnectionLineComponent/index.tsx
+++ b/src/frontend/src/pages/FlowPage/components/ConnectionLineComponent/index.tsx
@@ -12,9 +12,8 @@ const ConnectionLineComponent = ({
diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx
index 51c84580e..a6ac0f4df 100644
--- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx
+++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx
@@ -184,11 +184,11 @@ export default function Page({ flow }: { flow: FlowType }) {
addEdge(
{
...params,
- style: { stroke: "inherit" },
+ style: { stroke: "#555" },
className:
- params.targetHandle.split("|")[0] === "Text"
+ (params.targetHandle.split("|")[0] === "Text"
? "stroke-foreground "
- : "stroke-foreground ",
+ : "stroke-foreground ") + " stroke-connection",
animated: params.targetHandle.split("|")[0] === "Text",
},
eds
@@ -288,7 +288,7 @@ export default function Page({ flow }: { flow: FlowType }) {
}
},
// Specify dependencies for useCallback
- [getNodeId, reactFlowInstance, setErrorData, setNodes, takeSnapshot]
+ [getNodeId, reactFlowInstance, setNodes, takeSnapshot]
);
useEffect(() => {
@@ -322,7 +322,7 @@ export default function Page({ flow }: { flow: FlowType }) {
setEdges((els) => updateEdge(oldEdge, newConnection, els));
}
},
- []
+ [reactFlowInstance, setEdges]
);
const onEdgeUpdateEnd = useCallback((_, edge) => {
@@ -338,7 +338,8 @@ export default function Page({ flow }: { flow: FlowType }) {
const onSelectionEnd = useCallback(() => {
setSelectionEnded(true);
}, []);
- const onSelectionStart = useCallback(() => {
+ const onSelectionStart = useCallback((event) => {
+ event.preventDefault();
setSelectionEnded(false);
}, []);
@@ -371,10 +372,11 @@ export default function Page({ flow }: { flow: FlowType }) {
{
- updateFlow({
- ...flow,
- data: reactFlowInstance.toObject(),
- });
+ if (reactFlowInstance)
+ updateFlow({
+ ...flow,
+ data: reactFlowInstance.toObject(),
+ });
}}
edges={edges}
onPaneClick={() => {
@@ -409,7 +411,6 @@ export default function Page({ flow }: { flow: FlowType }) {
nodesDraggable={!disableCopyPaste}
panOnDrag={!disableCopyPaste}
zoomOnDoubleClick={!disableCopyPaste}
- selectNodesOnDrag={false}
className="theme-attribution"
minZoom={0.01}
maxZoom={8}
diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
index 4a77975d4..f7223d67a 100644
--- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
+++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
@@ -162,7 +162,9 @@ export default function ExtraSidebar() {
;
diff --git a/src/frontend/src/types/tabs/index.ts b/src/frontend/src/types/tabs/index.ts
index 33acd1ee7..82e75e4e4 100644
--- a/src/frontend/src/types/tabs/index.ts
+++ b/src/frontend/src/types/tabs/index.ts
@@ -1,5 +1,5 @@
import { Dispatch, SetStateAction } from "react";
-import { FlowType } from "../flow";
+import { FlowType, TweaksType } from "../flow";
export type TabsContextType = {
saveFlow: (flow: FlowType) => Promise
;
@@ -32,8 +32,8 @@ export type TabsContextType = {
) => void;
lastCopiedSelection: { nodes: any; edges: any };
setLastCopiedSelection: (selection: { nodes: any; edges: any }) => void;
- setTweak: (tweak: any) => void;
- getTweak: any;
+ setTweak: (tweak: TweaksType) => void;
+ getTweak: TweaksType[];
};
export type TabsState = {
diff --git a/src/frontend/src/types/utils/reactflowUtils.ts b/src/frontend/src/types/utils/reactflowUtils.ts
new file mode 100644
index 000000000..ecbbda4e4
--- /dev/null
+++ b/src/frontend/src/types/utils/reactflowUtils.ts
@@ -0,0 +1,10 @@
+import { Edge } from "reactflow";
+import { NodeType } from "../flow";
+
+export type cleanEdgesType = {
+ flow: {
+ edges: Edge[];
+ nodes: NodeType[];
+ };
+ updateEdge: (edge: Edge[]) => void;
+};
diff --git a/src/frontend/src/util/reactflowUtils.ts b/src/frontend/src/util/reactflowUtils.ts
new file mode 100644
index 000000000..da04cc75c
--- /dev/null
+++ b/src/frontend/src/util/reactflowUtils.ts
@@ -0,0 +1,46 @@
+import _ from "lodash";
+import { cleanEdgesType } from "./../types/utils/reactflowUtils";
+
+export function cleanEdges({
+ flow: { edges, nodes },
+ updateEdge,
+}: cleanEdgesType) {
+ let newEdges = _.cloneDeep(edges);
+ edges.forEach((edge) => {
+ // check if the source and target node still exists
+ const sourceNode = nodes.find((node) => node.id === edge.source);
+ const targetNode = nodes.find((node) => node.id === edge.target);
+ if (!sourceNode || !targetNode) {
+ newEdges = newEdges.filter((e) => e.id !== edge.id);
+ }
+ // check if the source and target handle still exists
+ if (sourceNode && targetNode) {
+ const sourceHandle = edge.sourceHandle; //right
+ const targetHandle = edge.targetHandle; //left
+ if (targetHandle) {
+ const field = targetHandle.split("|")[1];
+ const id =
+ (targetNode.data.node.template[field]?.input_types?.join(";") ??
+ targetNode.data.node.template[field]?.type) +
+ "|" +
+ field +
+ "|" +
+ targetNode.data.id;
+ if (id !== targetHandle) {
+ newEdges = newEdges.filter((e) => e.id !== edge.id);
+ }
+ }
+ if (sourceHandle) {
+ const id = [
+ sourceNode.data.type,
+ sourceNode.data.id,
+ ...sourceNode.data.node.base_classes,
+ ].join("|");
+ if (id !== sourceHandle) {
+ newEdges = newEdges.filter((e) => e.id !== edge.id);
+ }
+ }
+ }
+ });
+ updateEdge(newEdges);
+}
diff --git a/src/frontend/src/utils.ts b/src/frontend/src/utils.ts
index d9be31ad5..55bd72af3 100644
--- a/src/frontend/src/utils.ts
+++ b/src/frontend/src/utils.ts
@@ -35,7 +35,7 @@ import { EvernoteIcon } from "./icons/Evernote";
import { FBIcon } from "./icons/FacebookMessenger";
import { GitBookIcon } from "./icons/GitBook";
import { GoogleIcon } from "./icons/Google";
-import { HugginFaceIcon } from "./icons/HuggingFace";
+import { HuggingFaceIcon } from "./icons/HuggingFace";
import { IFixIcon } from "./icons/IFixIt";
import { MetaIcon } from "./icons/Meta";
import { MidjourneyIcon } from "./icons/Midjorney";
@@ -206,10 +206,10 @@ export const nodeIconsLucide: {
HNLoader: HackerNewsIcon as React.ForwardRefExoticComponent<
ComponentType>
>,
- HuggingFaceHub: HugginFaceIcon as React.ForwardRefExoticComponent<
+ HuggingFaceHub: HuggingFaceIcon as React.ForwardRefExoticComponent<
ComponentType>
>,
- HuggingFaceEmbeddings: HugginFaceIcon as React.ForwardRefExoticComponent<
+ HuggingFaceEmbeddings: HuggingFaceIcon as React.ForwardRefExoticComponent<
ComponentType>
>,
IFixitLoader: IFixIcon as React.ForwardRefExoticComponent<
@@ -608,7 +608,7 @@ export function isValidConnection(
) ||
targetHandle.split("|")[0] === "str"
) {
- let targetNode = reactFlowInstance.getNode(target).data.node;
+ let targetNode = reactFlowInstance?.getNode(target)?.data?.node;
if (!targetNode) {
if (
!reactFlowInstance
@@ -853,7 +853,7 @@ export function groupByFamily(data, baseClasses, left, type) {
});
});
- if (left == false) {
+ if (left === false) {
let groupedBy = arrOfType.filter((object, index, self) => {
const foundIndex = self.findIndex(
(o) => o.family === object.family && o.type === object.type
@@ -876,7 +876,7 @@ export function groupByFamily(data, baseClasses, left, type) {
});
}
- if (left == false) {
+ if (left === false) {
let resFil = result.filter((group) => group.family === parentOutput);
result = resFil;
}
@@ -903,8 +903,8 @@ export function groupByFamily(data, baseClasses, left, type) {
}
groupedArray.forEach((object, index, self) => {
- const findObj = arrOfLength.find((x) => x.type == object.family);
- if (object.component.length == findObj.length) {
+ const findObj = arrOfLength.find((x) => x.type === object.family);
+ if (object.component.length === findObj.length) {
self[index]["type"] = "";
} else {
self[index]["type"] = object.component.join(", ");
@@ -1048,10 +1048,6 @@ export const INVALID_CHARACTERS = [
export const regexHighlight = /\{([^}]+)\}/g;
export const varHighlightHTML = ({ name }: IVarHighlightType) => {
- const html = `
- {
- ${name}
- }
-
`;
+ const html = `{${name}} `;
return html;
};
diff --git a/src/frontend/tailwind.config.js b/src/frontend/tailwind.config.js
index b3d296560..429b9b26a 100644
--- a/src/frontend/tailwind.config.js
+++ b/src/frontend/tailwind.config.js
@@ -43,6 +43,7 @@ module.exports = {
"accordion-up": "accordion-up 0.2s ease-out",
},
colors: {
+ connection: "var(--connection)",
"almost-dark-gray": "var(--almost-dark-gray)",
"almost-light-blue": "var(--almost-light-blue)",
"almost-medium-blue": "var(--almost-medium-blue)",
@@ -82,6 +83,8 @@ module.exports = {
"status-yellow": "var(--status-yellow)",
"success-background": "var(--success-background)",
"success-foreground": "var(--success-foreground)",
+ "chat-bot-icon": "var(--chat-bot-icon)",
+ "chat-user-icon": "var(--chat-user-icon)",
white: "var(--white)",
border: "hsl(var(--border))",
@@ -127,18 +130,18 @@ module.exports = {
sans: ["var(--font-sans)", ...fontFamily.sans],
},
keyframes: {
- "accordion-down": {
+ slideDown: {
from: { height: 0 },
- to: { height: "var(--radix-accordion-content-height)" },
+ to: { height: 100 },
},
- "accordion-up": {
+ slideUp: {
from: { height: "var(--radix-accordion-content-height)" },
to: { height: 0 },
},
},
animation: {
- "accordion-down": "accordion-down 0.2s ease-out",
- "accordion-up": "accordion-up 0.2s ease-out",
+ "accordion-down": "slideDown 300ms ease-out",
+ "accordion-up": "slideUp 300ms ease-in",
},
},
},
diff --git a/tests/test_agents_template.py b/tests/test_agents_template.py
index bf89f9996..93f4f8b5b 100644
--- a/tests/test_agents_template.py
+++ b/tests/test_agents_template.py
@@ -16,30 +16,94 @@ def test_zero_shot_agent(client: TestClient):
}
template = zero_shot_agent["template"]
- assert template["llm_chain"] == {
+ assert template["tools"] == {
"required": True,
"placeholder": "",
"show": True,
"multiline": False,
"password": False,
- "name": "llm_chain",
- "type": "LLMChain",
+ "name": "tools",
+ "type": "BaseTool",
+ "list": True,
+ "advanced": False,
+ "info": "",
+ }
+
+ # Additional assertions for other template variables
+ assert template["callback_manager"] == {
+ "required": False,
+ "placeholder": "",
+ "show": False,
+ "multiline": False,
+ "password": False,
+ "name": "callback_manager",
+ "type": "BaseCallbackManager",
"list": False,
"advanced": False,
"info": "",
}
- assert template["allowed_tools"] == {
- "required": False,
+ assert template["llm"] == {
+ "required": True,
"placeholder": "",
"show": True,
"multiline": False,
"password": False,
- "name": "allowed_tools",
- "type": "Tool",
+ "name": "llm",
+ "type": "BaseLanguageModel",
+ "list": False,
+ "advanced": False,
+ "info": "",
+ }
+ assert template["output_parser"] == {
+ "required": False,
+ "placeholder": "",
+ "show": False,
+ "multiline": False,
+ "password": False,
+ "name": "output_parser",
+ "type": "AgentOutputParser",
+ "list": False,
+ "advanced": False,
+ "info": "",
+ }
+ assert template["input_variables"] == {
+ "required": False,
+ "placeholder": "",
+ "show": False,
+ "multiline": False,
+ "password": False,
+ "name": "input_variables",
+ "type": "str",
"list": True,
"advanced": False,
"info": "",
}
+ assert template["prefix"] == {
+ "required": False,
+ "placeholder": "",
+ "show": True,
+ "multiline": True,
+ "value": "Answer the following questions as best you can. You have access to the following tools:",
+ "password": False,
+ "name": "prefix",
+ "type": "str",
+ "list": False,
+ "advanced": False,
+ "info": "",
+ }
+ assert template["suffix"] == {
+ "required": False,
+ "placeholder": "",
+ "show": True,
+ "multiline": True,
+ "value": "Begin!\n\nQuestion: {input}\nThought:{agent_scratchpad}",
+ "password": False,
+ "name": "suffix",
+ "type": "str",
+ "list": False,
+ "advanced": False,
+ "info": "",
+ }
def test_json_agent(client: TestClient):
diff --git a/tests/test_chains_template.py b/tests/test_chains_template.py
index 1c28f6536..e183cb0d0 100644
--- a/tests/test_chains_template.py
+++ b/tests/test_chains_template.py
@@ -42,7 +42,7 @@ def test_conversation_chain(client: TestClient):
assert template["verbose"] == {
"required": False,
"placeholder": "",
- "show": True,
+ "show": False,
"multiline": False,
"password": False,
"name": "verbose",
@@ -128,7 +128,7 @@ def test_llm_chain(client: TestClient):
assert template["verbose"] == {
"required": False,
"placeholder": "",
- "show": True,
+ "show": False,
"multiline": False,
"value": False,
"password": False,
@@ -228,7 +228,7 @@ def test_llm_math_chain(client: TestClient):
assert template["verbose"] == {
"required": False,
"placeholder": "",
- "show": True,
+ "show": False,
"multiline": False,
"value": False,
"password": False,