diff --git a/GCP_DEPLOYMENT.md b/GCP_DEPLOYMENT.md
new file mode 100644
index 000000000..edb7e043c
--- /dev/null
+++ b/GCP_DEPLOYMENT.md
@@ -0,0 +1,28 @@
+# Run Langflow from a New Google Cloud Project
+
+This guide will help you set up a Langflow development VM in a Google Cloud Platform project using Google Cloud Shell.
+
+> **Note**: When Cloud Shell opens, be sure to select **Trust repo**. Some `gcloud` commands might not run in an ephemeral Cloud Shell environment.
+
+
+## Standard VM
+[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
+
+This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment.
+
+
+## Spot/Preemptible Instance
+
+[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
+
+When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts.
+
+## Pricing (approximate)
+> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator)
+
+
+| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
+| -------------- | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | ----- |
+| 100 GB Disk | - | $10/month | - | $10/month | Disk cost remains the same for both regular and Spot/Preemptible VMs |
+| VM (n1-standard-4) | $0.15/hr | ~$108/month | ~$0.04/hr | ~$29/month | The VM cost can be significantly reduced using a Spot/Preemptible instance |
+| **Total** | **$0.15/hr** | **~$118/month** | **~$0.04/hr** | **~$39/month** | Total costs for running the VM and disk 24/7 for an entire month |
diff --git a/README.md b/README.md
index 970496349..3ae891751 100644
--- a/README.md
+++ b/README.md
@@ -19,14 +19,31 @@
LangFlow is a GUI for [LangChain](https://github.com/hwchase17/langchain), designed with [react-flow](https://github.com/wbkd/react-flow) to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chat box.
## 📦 Installation
-
+### Locally
You can install LangFlow from pip:
-`pip install langflow`
+```shell
+pip install langflow
+```
Next, run:
-`langflow`
+```shell
+python -m langflow
+```
+or
+```shell
+langflow
+```
+
+### Deploy Langflow on Google Cloud Platform
+
+Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.
+
+Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project.
+
+[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
+
## 🎨 Creating Flows
diff --git a/scripts/deploy_langflow_gcp.sh b/scripts/deploy_langflow_gcp.sh
new file mode 100644
index 000000000..2c3dc0420
--- /dev/null
+++ b/scripts/deploy_langflow_gcp.sh
@@ -0,0 +1,90 @@
+# Set the VM, image, and networking configuration
+VM_NAME="langflow-dev"
+IMAGE_FAMILY="debian-11"
+IMAGE_PROJECT="debian-cloud"
+BOOT_DISK_SIZE="100GB"
+ZONE="us-central1-a"
+REGION="us-central1"
+VPC_NAME="default"
+SUBNET_NAME="default"
+SUBNET_RANGE="10.128.0.0/20"
+NAT_GATEWAY_NAME="nat-gateway"
+CLOUD_ROUTER_NAME="nat-client"
+
+# Set the GCP project's compute region
+gcloud config set compute/region $REGION
+
+# Check if the VPC exists, and create it if not
+vpc_exists=$(gcloud compute networks list --filter="name=$VPC_NAME" --format="value(name)")
+if [[ -z "$vpc_exists" ]]; then
+    gcloud compute networks create $VPC_NAME --subnet-mode=custom
+fi
+
+# Check if the subnet exists, and create it if not
+subnet_exists=$(gcloud compute networks subnets list --filter="name=$SUBNET_NAME AND region=$REGION" --format="value(name)")
+if [[ -z "$subnet_exists" ]]; then
+    gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE
+fi
+
+# Create a firewall rule to allow TCP port 8080 for all instances in the VPC
+firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)")
+if [[ -z "$firewall_8080_exists" ]]; then
+    gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS
+fi
+
+# Create a firewall rule to allow IAP traffic
+firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)")
+if [[ -z "$firewall_iap_exists" ]]; then
+    gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS
+fi
+
+# Define the startup script as a multiline Bash here-doc (quoted EOF: no expansion here)
+STARTUP_SCRIPT=$(cat <<'EOF'
+#!/bin/bash
+
+# Update and upgrade the system (apt-get has a stable CLI for scripts)
+apt-get -y update
+apt-get -y upgrade
+
+# Install Python 3 pip, Langflow, and Nginx (pip3 is the explicit Python 3 entry point)
+apt-get -y install python3-pip
+pip3 install langflow
+apt-get -y install nginx
+
+# Configure Nginx as a reverse proxy: public port 8080 -> local Langflow on 7860
+touch /etc/nginx/sites-available/langflow-app
+echo "server {
+    listen 0.0.0.0:8080;
+
+    location / {
+        proxy_pass http://127.0.0.1:7860;
+        proxy_set_header Host "\$host";
+        proxy_set_header X-Real-IP "\$remote_addr";
+        proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for";
+    }
+}" >> /etc/nginx/sites-available/langflow-app
+ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/
+# Startup scripts already run as root, so sudo is unnecessary
+nginx -t
+systemctl restart nginx
+langflow
+EOF
+)
+
+# Create a temporary file to store the startup script
+tempfile=$(mktemp)
+echo "$STARTUP_SCRIPT" > "$tempfile"
+
+# Create the VM instance with the specified configuration and startup script
+gcloud compute instances create $VM_NAME \
+    --image-family $IMAGE_FAMILY \
+    --image-project $IMAGE_PROJECT \
+    --boot-disk-size $BOOT_DISK_SIZE \
+    --machine-type=n1-standard-4 \
+    --metadata-from-file startup-script="$tempfile" \
+    --zone $ZONE \
+    --network $VPC_NAME \
+    --subnet $SUBNET_NAME
+
+# Remove the temporary file after the VM is created
+rm "$tempfile"
diff --git a/scripts/deploy_langflow_gcp_spot.sh b/scripts/deploy_langflow_gcp_spot.sh
new file mode 100644
index 000000000..065b6013f
--- /dev/null
+++ b/scripts/deploy_langflow_gcp_spot.sh
@@ -0,0 +1,91 @@
+# Set the VM, image, and networking configuration
+VM_NAME="langflow-dev"
+IMAGE_FAMILY="debian-11"
+IMAGE_PROJECT="debian-cloud"
+BOOT_DISK_SIZE="100GB"
+ZONE="us-central1-a"
+REGION="us-central1"
+VPC_NAME="default"
+SUBNET_NAME="default"
+SUBNET_RANGE="10.128.0.0/20"
+NAT_GATEWAY_NAME="nat-gateway"
+CLOUD_ROUTER_NAME="nat-client"
+
+# Set the GCP project's compute region
+gcloud config set compute/region $REGION
+
+# Check if the VPC exists, and create it if not
+vpc_exists=$(gcloud compute networks list --filter="name=$VPC_NAME" --format="value(name)")
+if [[ -z "$vpc_exists" ]]; then
+    gcloud compute networks create $VPC_NAME --subnet-mode=custom
+fi
+
+# Check if the subnet exists, and create it if not
+subnet_exists=$(gcloud compute networks subnets list --filter="name=$SUBNET_NAME AND region=$REGION" --format="value(name)")
+if [[ -z "$subnet_exists" ]]; then
+    gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE
+fi
+
+# Create a firewall rule to allow TCP port 8080 for all instances in the VPC
+firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)")
+if [[ -z "$firewall_8080_exists" ]]; then
+    gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS
+fi
+
+# Create a firewall rule to allow IAP traffic
+firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)")
+if [[ -z "$firewall_iap_exists" ]]; then
+    gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS
+fi
+
+# Define the startup script as a multiline Bash here-doc (quoted EOF: no expansion here)
+STARTUP_SCRIPT=$(cat <<'EOF'
+#!/bin/bash
+
+# Update and upgrade the system (apt-get has a stable CLI for scripts)
+apt-get -y update
+apt-get -y upgrade
+
+# Install Python 3 pip, Langflow, and Nginx (pip3 is the explicit Python 3 entry point)
+apt-get -y install python3-pip
+pip3 install langflow
+apt-get -y install nginx
+
+# Configure Nginx as a reverse proxy: public port 8080 -> local Langflow on 7860
+touch /etc/nginx/sites-available/langflow-app
+echo "server {
+    listen 0.0.0.0:8080;
+
+    location / {
+        proxy_pass http://127.0.0.1:7860;
+        proxy_set_header Host "\$host";
+        proxy_set_header X-Real-IP "\$remote_addr";
+        proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for";
+    }
+}" >> /etc/nginx/sites-available/langflow-app
+ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/
+# Startup scripts already run as root, so sudo is unnecessary
+nginx -t
+systemctl restart nginx
+langflow
+EOF
+)
+
+# Create a temporary file to store the startup script
+tempfile=$(mktemp)
+echo "$STARTUP_SCRIPT" > "$tempfile"
+
+# Create the VM instance as a Spot/preemptible VM with the startup script.
+# NOTE: the flag is --preemptible (double dash); a single dash is rejected by gcloud.
+gcloud compute instances create $VM_NAME \
+    --image-family $IMAGE_FAMILY \
+    --image-project $IMAGE_PROJECT \
+    --boot-disk-size $BOOT_DISK_SIZE \
+    --machine-type=n1-standard-4 \
+    --metadata-from-file startup-script="$tempfile" \
+    --zone $ZONE \
+    --network $VPC_NAME \
+    --subnet $SUBNET_NAME \
+    --preemptible
+
+# Remove the temporary file after the VM is created
+rm "$tempfile"
diff --git a/scripts/walkthroughtutorial.md b/scripts/walkthroughtutorial.md
new file mode 100644
index 000000000..fa6e3c11d
--- /dev/null
+++ b/scripts/walkthroughtutorial.md
@@ -0,0 +1,86 @@
+# Deploy Langflow on Google Cloud Platform
+
+**Duration**: 45 minutes
+**Author**: [Robert Wilkins III](https://www.linkedin.com/in/robertwilkinsiii)
+
+## Introduction
+
+In this tutorial, you will learn how to deploy Langflow on [Google Cloud Platform](https://cloud.google.com/) (GCP) using Google Cloud Shell.
+
+This tutorial assumes you have a GCP account and basic knowledge of Google Cloud Shell. If you're not familiar with Cloud Shell, you can review the [Cloud Shell documentation](https://cloud.google.com/shell/docs).
+
+## Set up your environment
+
+Before you start, make sure you have the following prerequisites:
+
+- A GCP account with the necessary permissions to create resources
+- A project on GCP where you want to deploy Langflow
+
+[**Select your GCP project**]
+
+
+
+In the next step, you'll configure the GCP environment and deploy Langflow.
+
+## Configure the GCP environment and deploy Langflow
+Run the deploy_langflow_gcp.sh script to configure the GCP environment and deploy Langflow:
+
+```sh
+gcloud config set project <YOUR_PROJECT_ID>
+bash ./deploy_langflow_gcp.sh
+```
+
+The script will:
+
+1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed
+2. Create a startup script to install Python, Langflow, and Nginx
+3. Create a Compute Engine VM instance with the specified configuration and startup script
+4. Configure Nginx to serve Langflow on TCP port 8080
+
+
+> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished.
+
+In the next step, you'll learn how to connect to the Langflow VM.
+
+## Connect to the Langflow VM
+To connect to your new Langflow VM, follow these steps:
+
+1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080
+
+**or**
+2. Run the following command to display the URL for your Langflow environment:
+```bash
+export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)")
+
+echo http://$LANGFLOW_IP:8080
+```
+
+3. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment
+
+Congratulations! You have successfully deployed Langflow on Google Cloud Platform.
+
+
+
+## Cleanup
+If you want to remove the resources created during this tutorial, you can use the following commands:
+
+```bash
+gcloud compute instances delete langflow-dev --zone us-central1-a --quiet
+```
+The following network settings and services are used during this walkthrough. If you plan to continue using the project after the walkthrough, you may keep these configurations in place.
+
+However, if you decide to remove them after completing the walkthrough, you can use the following gcloud commands:
+
+
+> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings.
+
+```
+gcloud compute firewall-rules delete allow-tcp-8080 --quiet
+
+gcloud compute firewall-rules delete allow-iap --quiet
+
+gcloud compute networks subnets delete default --region us-central1 --quiet
+
+gcloud compute networks delete default --quiet
+```
diff --git a/scripts/walkthroughtutorial_spot.md b/scripts/walkthroughtutorial_spot.md
new file mode 100644
index 000000000..751f03d78
--- /dev/null
+++ b/scripts/walkthroughtutorial_spot.md
@@ -0,0 +1,83 @@
+# Deploy Langflow on Google Cloud Platform
+
+**Duration**: 45 minutes
+**Author**: [Robert Wilkins III](https://www.linkedin.com/in/robertwilkinsiii)
+
+## Introduction
+
+In this tutorial, you will learn how to deploy Langflow on [Google Cloud Platform](https://cloud.google.com/) (GCP) using Google Cloud Shell.
+
+This tutorial assumes you have a GCP account and basic knowledge of Google Cloud Shell. If you're not familiar with Cloud Shell, you can review the [Cloud Shell documentation](https://cloud.google.com/shell/docs).
+
+## Set up your environment
+
+Before you start, make sure you have the following prerequisites:
+
+- A GCP account with the necessary permissions to create resources
+- A project on GCP where you want to deploy Langflow
+
+[**Select your GCP project**]
+
+
+
+In the next step, you'll configure the GCP environment and deploy Langflow.
+
+## Configure the GCP environment and deploy Langflow
+Run the deploy_langflow_gcp_spot.sh script to configure the GCP environment and deploy Langflow:
+
+```sh
+gcloud config set project <YOUR_PROJECT_ID>
+bash ./deploy_langflow_gcp_spot.sh
+```
+
+The script will:
+
+1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed
+2. Create a startup script to install Python, Langflow, and Nginx
+3. Create a Compute Engine VM instance with the specified configuration and startup script
+4. Configure Nginx to serve Langflow on TCP port 8080
+
+> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished.
+
+In the next step, you'll learn how to connect to the Langflow VM.
+
+## Connect to the Langflow VM
+To connect to your new Langflow VM, follow these steps:
+
+1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080
+
+**or**
+2. Run the following command to display the URL for your Langflow environment:
+```bash
+export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)")
+
+echo http://$LANGFLOW_IP:8080
+```
+
+3. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment
+
+Congratulations! You have successfully deployed Langflow on Google Cloud Platform.
+
+
+
+## Cleanup
+If you want to remove the resources created during this tutorial, you can use the following commands:
+
+```bash
+gcloud compute instances delete langflow-dev --zone us-central1-a --quiet
+```
+The following network settings and services are used during this walkthrough. If you plan to continue using the project after the walkthrough, you may keep these configurations in place.
+
+However, if you decide to remove them after completing the walkthrough, you can use the following gcloud commands:
+> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings.
+
+```
+gcloud compute firewall-rules delete allow-tcp-8080 --quiet
+
+gcloud compute firewall-rules delete allow-iap --quiet
+
+gcloud compute networks subnets delete default --region us-central1 --quiet
+
+gcloud compute networks delete default --quiet
+```
diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml
index 7f00167b4..9236d5996 100644
--- a/src/backend/langflow/config.yaml
+++ b/src/backend/langflow/config.yaml
@@ -6,6 +6,7 @@ chains:
- SeriesCharacterChain
- MidJourneyPromptChain
- TimeTravelGuideChain
+ - SQLDatabaseChain
agents:
- ZeroShotAgent
@@ -40,6 +41,27 @@ tools:
- Tool
- PythonFunction
- JsonSpec
+ - News API
+ - TMDB API
+ - Podcast API
+ - QuerySQLDataBaseTool
+ - InfoSQLDatabaseTool
+ - ListSQLDatabaseTool
+ # - QueryCheckerTool
+ - BingSearchRun
+ - GoogleSearchRun
+ - GoogleSearchResults
+ - JsonListKeysTool
+ - JsonGetValueTool
+ - PythonREPLTool
+ - PythonAstREPLTool
+ - RequestsGetTool
+ - RequestsPostTool
+ - RequestsPatchTool
+ - RequestsPutTool
+ - RequestsDeleteTool
+ - WikipediaQueryRun
+ - WolframAlphaQueryRun
wrappers:
- RequestsWrapper
@@ -91,4 +113,16 @@ documentloaders:
textsplitters:
- CharacterTextSplitter
+utilities:
+ - BingSearchAPIWrapper
+ - GoogleSearchAPIWrapper
+ - GoogleSerperAPIWrapper
+ - SearxResults
+ - SearxSearchWrapper
+ - SerpAPIWrapper
+ - WikipediaAPIWrapper
+ - WolframAlphaAPIWrapper
+ # - ZapierNLAWrapper
+ - SQLDatabase
+
dev: false
diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py
index e77b81ec6..d45221be7 100644
--- a/src/backend/langflow/custom/customs.py
+++ b/src/backend/langflow/custom/customs.py
@@ -12,6 +12,9 @@ CUSTOM_NODES = {
"VectorStoreRouterAgent": nodes.VectorStoreRouterAgentNode(),
"SQLAgent": nodes.SQLAgentNode(),
},
+ "utilities": {
+ "SQLDatabase": nodes.SQLDatabaseNode(),
+ },
}
diff --git a/src/backend/langflow/graph/base.py b/src/backend/langflow/graph/base.py
index ff586c6da..6d998eed6 100644
--- a/src/backend/langflow/graph/base.py
+++ b/src/backend/langflow/graph/base.py
@@ -202,7 +202,11 @@ class Node:
"VectorStoreRouterAgent",
"VectorStoreAgent",
"VectorStoreInfo",
- ] or self.node_type in ["VectorStoreInfo", "VectorStoreRouterToolkit"]:
+ ] or self.node_type in [
+ "VectorStoreInfo",
+ "VectorStoreRouterToolkit",
+ "SQLDatabase",
+ ]:
return self._built_object
return deepcopy(self._built_object)
diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py
index 7296a0c0d..018174334 100644
--- a/src/backend/langflow/graph/nodes.py
+++ b/src/backend/langflow/graph/nodes.py
@@ -101,6 +101,10 @@ class ChainNode(Node):
self.params[key] = value.build(tools=tools, force=force)
self._build()
+
+ #! Cannot deepcopy SQLDatabaseChain
+ if self.node_type in ["SQLDatabaseChain"]:
+ return self._built_object
return deepcopy(self._built_object)
diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py
index 2ffc18b14..f07b03f04 100644
--- a/src/backend/langflow/interface/custom_lists.py
+++ b/src/backend/langflow/interface/custom_lists.py
@@ -9,10 +9,12 @@ from langchain import (
memory,
requests,
text_splitter,
+ utilities,
vectorstores,
)
from langchain.agents import agent_toolkits
from langchain.chat_models import ChatOpenAI
+from langchain.sql_database import SQLDatabase
from langflow.interface.importing.utils import import_class
@@ -76,3 +78,9 @@ documentloaders_type_to_cls_dict: dict[str, Any] = {
textsplitter_type_to_cls_dict: dict[str, Any] = dict(
inspect.getmembers(text_splitter, inspect.isclass)
)
+
+## Utilities
+utility_type_to_cls_dict: dict[str, Any] = dict(
+ inspect.getmembers(utilities, inspect.isclass)
+)
+utility_type_to_cls_dict["SQLDatabase"] = SQLDatabase
diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py
index a3480928e..e303da0eb 100644
--- a/src/backend/langflow/interface/importing/utils.py
+++ b/src/backend/langflow/interface/importing/utils.py
@@ -10,7 +10,7 @@ from langchain.chat_models.base import BaseChatModel
from langchain.llms.base import BaseLLM
from langchain.tools import BaseTool
-from langflow.interface.tools.util import get_tool_by_name
+from langflow.interface.tools.base import tool_creator
def import_module(module_path: str) -> Any:
@@ -44,6 +44,7 @@ def import_by_type(_type: str, name: str) -> Any:
"vectorstores": import_vectorstore,
"documentloaders": import_documentloader,
"textsplitters": import_textsplitter,
+ "utilities": import_utility,
}
if _type == "llms":
key = "chat" if "chat" in name.lower() else "llm"
@@ -107,7 +108,7 @@ def import_llm(llm: str) -> BaseLLM:
def import_tool(tool: str) -> BaseTool:
"""Import tool from tool name"""
- return get_tool_by_name(tool)
+ return tool_creator.type_to_loader_dict[tool]["fcn"]
def import_chain(chain: str) -> Type[Chain]:
@@ -131,10 +132,16 @@ def import_vectorstore(vectorstore: str) -> Any:
def import_documentloader(documentloader: str) -> Any:
"""Import documentloader from documentloader name"""
-
return import_class(f"langchain.document_loaders.{documentloader}")
def import_textsplitter(textsplitter: str) -> Any:
"""Import textsplitter from textsplitter name"""
return import_class(f"langchain.text_splitter.{textsplitter}")
+
+
+def import_utility(utility: str) -> Any:
+ """Import utility from utility name"""
+ if utility == "SQLDatabase":
+ return import_class(f"langchain.sql_database.{utility}")
+ return import_class(f"langchain.utilities.{utility}")
diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py
index cf45fd9c5..3d73105c2 100644
--- a/src/backend/langflow/interface/listing.py
+++ b/src/backend/langflow/interface/listing.py
@@ -8,6 +8,7 @@ from langflow.interface.prompts.base import prompt_creator
from langflow.interface.text_splitters.base import textsplitter_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
+from langflow.interface.utilities.base import utility_creator
from langflow.interface.vector_store.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
@@ -26,6 +27,7 @@ def get_type_dict():
"vectorStore": vectorstore_creator.to_list(),
"embeddings": embedding_creator.to_list(),
"textSplitters": textsplitter_creator.to_list(),
+ "utilities": utility_creator.to_list(),
}
diff --git a/src/backend/langflow/interface/loading.py b/src/backend/langflow/interface/loading.py
index 1b404f38c..4cf702ea2 100644
--- a/src/backend/langflow/interface/loading.py
+++ b/src/backend/langflow/interface/loading.py
@@ -82,6 +82,9 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
documents = params.pop("documents")
text_splitter = class_object(**params)
return text_splitter.split_documents(documents)
+ elif base_type == "utilities":
+ if node_type == "SQLDatabase":
+ return class_object.from_uri(params.pop("uri"))
return class_object(**params)
@@ -91,7 +94,7 @@ def load_flow_from_json(path: str, build=True):
from langflow.graph import Graph
"""Load flow from json file"""
- with open(path, "r") as f:
+ with open(path, "r", encoding="utf-8") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
nodes = data_graph["nodes"]
diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py
index 300e09e01..deba28586 100644
--- a/src/backend/langflow/interface/run.py
+++ b/src/backend/langflow/interface/run.py
@@ -1,6 +1,7 @@
import contextlib
import io
from typing import Any, Dict
+from chromadb.errors import NotEnoughElementsException
from langflow.cache.utils import compute_dict_hash, load_cache, memoize_dict
from langflow.graph.graph import Graph
@@ -230,6 +231,10 @@ def get_result_and_thought_using_graph(langchain_object, message: str):
else:
thought = output_buffer.getvalue()
+ except NotEnoughElementsException as exc:
+ raise ValueError(
+ "Error: Not enough documents for ChromaDB to index. Try reducing chunk size in TextSplitter."
+ ) from exc
except Exception as exc:
raise ValueError(f"Error: {str(exc)}") from exc
return result, thought
diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py
index 6a3439bf0..5fd0c72f0 100644
--- a/src/backend/langflow/interface/tools/base.py
+++ b/src/backend/langflow/interface/tools/base.py
@@ -1,7 +1,6 @@
from typing import Dict, List, Optional
from langchain.agents.load_tools import (
- _BASE_TOOLS,
_EXTRA_LLM_TOOLS,
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
@@ -10,17 +9,16 @@ from langchain.agents.load_tools import (
from langflow.custom import customs
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.tools.constants import (
+ ALL_TOOLS_NAMES,
CUSTOM_TOOLS,
FILE_TOOLS,
+ OTHER_TOOLS,
)
-from langflow.interface.tools.util import (
- get_tool_by_name,
- get_tool_params,
- get_tools_dict,
-)
+from langflow.interface.tools.util import get_tool_params
from langflow.settings import settings
from langflow.template.base import Template, TemplateField
from langflow.utils import util
+from langflow.utils.util import build_template_from_class
TOOL_INPUTS = {
"str": TemplateField(
@@ -66,64 +64,81 @@ class ToolCreator(LangChainTypeCreator):
@property
def type_to_loader_dict(self) -> Dict:
if self.tools_dict is None:
- self.tools_dict = get_tools_dict()
+ all_tools = {}
+ for tool, tool_fcn in ALL_TOOLS_NAMES.items():
+ tool_params = get_tool_params(tool_fcn)
+ tool_name = tool_params.get("name", tool)
+
+ if tool_name in settings.tools or settings.dev:
+ if tool_name == "JsonSpec":
+ tool_params["path"] = tool_params.pop("dict_") # type: ignore
+ all_tools[tool_name] = {
+ "type": tool,
+ "params": tool_params,
+ "fcn": tool_fcn,
+ }
+
+ self.tools_dict = all_tools
+
return self.tools_dict
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of a tool."""
base_classes = ["Tool"]
- all_tools = {}
- for tool in self.type_to_loader_dict.keys():
- tool_fcn = get_tool_by_name(tool)
- if tool_params := get_tool_params(tool_fcn):
- tool_name = tool_params.get("name") or str(tool)
- all_tools[tool_name] = {
- "type": tool,
- "params": tool_params,
- "fcn": tool_fcn,
- }
+ fields = []
+ params = []
+ tool_params = {}
# Raise error if name is not in tools
- if name not in all_tools.keys():
+ if name not in self.type_to_loader_dict.keys():
raise ValueError("Tool not found")
- tool_type: str = all_tools[name]["type"] # type: ignore
+ tool_type: str = self.type_to_loader_dict[name]["type"] # type: ignore
- if all_tools[tool_type]["fcn"] in _BASE_TOOLS.values():
- params = []
- elif all_tools[tool_type]["fcn"] in _LLM_TOOLS.values():
+ # if tool_type in _BASE_TOOLS.keys():
+ # params = []
+ if tool_type in _LLM_TOOLS.keys():
params = ["llm"]
- elif all_tools[tool_type]["fcn"] in [
- val[0] for val in _EXTRA_LLM_TOOLS.values()
- ]:
- n_dict = {val[0]: val[1] for val in _EXTRA_LLM_TOOLS.values()}
- extra_keys = n_dict[all_tools[tool_type]["fcn"]]
+ elif tool_type in _EXTRA_LLM_TOOLS.keys():
+ extra_keys = _EXTRA_LLM_TOOLS[tool_type][1]
params = ["llm"] + extra_keys
- elif all_tools[tool_type]["fcn"] in [
- val[0] for val in _EXTRA_OPTIONAL_TOOLS.values()
- ]:
- n_dict = {val[0]: val[1] for val in _EXTRA_OPTIONAL_TOOLS.values()} # type: ignore
- extra_keys = n_dict[all_tools[tool_type]["fcn"]]
+ elif tool_type in _EXTRA_OPTIONAL_TOOLS.keys():
+ extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type][1]
params = extra_keys
# elif tool_type == "Tool":
# params = ["name", "description", "func"]
elif tool_type in CUSTOM_TOOLS:
# Get custom tool params
- params = all_tools[name]["params"] # type: ignore
+ params = self.type_to_loader_dict[name]["params"] # type: ignore
base_classes = ["function"]
if node := customs.get_custom_nodes("tools").get(tool_type):
return node
elif tool_type in FILE_TOOLS:
- params = all_tools[name]["params"] # type: ignore
- if tool_type == "JsonSpec":
- params["path"] = params.pop("dict_") # type: ignore
+ params = self.type_to_loader_dict[name]["params"] # type: ignore
base_classes += [name]
- else:
- params = []
+ elif tool_type in OTHER_TOOLS:
+            # Tool classes not covered above: build the signature from the class template
+ tool_dict = build_template_from_class(tool_type, OTHER_TOOLS)
+ fields = tool_dict["template"]
+
+ # Pop unnecessary fields and add name
+ fields.pop("_type") # type: ignore
+ fields.pop("return_direct") # type: ignore
+ fields.pop("verbose") # type: ignore
+
+ tool_params = {
+ "name": fields.pop("name")["value"], # type: ignore
+ "description": fields.pop("description")["value"], # type: ignore
+ }
+
+ fields = [
+ TemplateField(name=name, field_type=field["type"], **field)
+ for name, field in fields.items() # type: ignore
+ ]
+ base_classes += tool_dict["base_classes"]
# Copy the field and add the name
- fields = []
for param in params:
field = TOOL_INPUTS.get(param, TOOL_INPUTS["str"]).copy()
field.name = param
@@ -134,7 +149,7 @@ class ToolCreator(LangChainTypeCreator):
template = Template(fields=fields, type_name=tool_type)
- tool_params = all_tools[name]["params"]
+ tool_params = {**tool_params, **self.type_to_loader_dict[name]["params"]}
return {
"template": util.format_dict(template.to_dict()),
**tool_params,
@@ -144,21 +159,7 @@ class ToolCreator(LangChainTypeCreator):
def to_list(self) -> List[str]:
"""List all load tools"""
- tools = []
-
- for tool, fcn in get_tools_dict().items():
- tool_params = get_tool_params(fcn)
-
- if tool_params and not tool_params.get("name"):
- tool_params["name"] = tool
-
- if tool_params and (
- tool_params.get("name") in settings.tools
- or (tool_params.get("name") and settings.dev)
- ):
- tools.append(tool_params["name"])
-
- return tools
+ return list(self.type_to_loader_dict.keys())
tool_creator = ToolCreator()
diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py
index 2ec20cef9..34890a684 100644
--- a/src/backend/langflow/interface/tools/constants.py
+++ b/src/backend/langflow/interface/tools/constants.py
@@ -5,12 +5,50 @@ from langchain.agents.load_tools import (
_EXTRA_OPTIONAL_TOOLS,
_LLM_TOOLS,
)
-from langchain.tools.json.tool import JsonSpec
+from langchain.tools.bing_search.tool import BingSearchRun
+from langchain.tools.google_search.tool import GoogleSearchResults, GoogleSearchRun
+from langchain.tools.json.tool import JsonGetValueTool, JsonListKeysTool, JsonSpec
+from langchain.tools.python.tool import PythonAstREPLTool, PythonREPLTool
+from langchain.tools.requests.tool import (
+ RequestsDeleteTool,
+ RequestsGetTool,
+ RequestsPatchTool,
+ RequestsPostTool,
+ RequestsPutTool,
+)
+from langchain.tools.sql_database.tool import (
+ InfoSQLDatabaseTool,
+ ListSQLDatabaseTool,
+ QueryCheckerTool,
+ QuerySQLDataBaseTool,
+)
+from langchain.tools.wikipedia.tool import WikipediaQueryRun
+from langchain.tools.wolfram_alpha.tool import WolframAlphaQueryRun
from langflow.interface.tools.custom import PythonFunction
FILE_TOOLS = {"JsonSpec": JsonSpec}
CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
+OTHER_TOOLS = {
+ "QuerySQLDataBaseTool": QuerySQLDataBaseTool,
+ "InfoSQLDatabaseTool": InfoSQLDatabaseTool,
+ "ListSQLDatabaseTool": ListSQLDatabaseTool,
+ "QueryCheckerTool": QueryCheckerTool,
+ "BingSearchRun": BingSearchRun,
+ "GoogleSearchRun": GoogleSearchRun,
+ "GoogleSearchResults": GoogleSearchResults,
+ "JsonListKeysTool": JsonListKeysTool,
+ "JsonGetValueTool": JsonGetValueTool,
+ "PythonREPLTool": PythonREPLTool,
+ "PythonAstREPLTool": PythonAstREPLTool,
+ "RequestsGetTool": RequestsGetTool,
+ "RequestsPostTool": RequestsPostTool,
+ "RequestsPatchTool": RequestsPatchTool,
+ "RequestsPutTool": RequestsPutTool,
+ "RequestsDeleteTool": RequestsDeleteTool,
+ "WikipediaQueryRun": WikipediaQueryRun,
+ "WolframAlphaQueryRun": WolframAlphaQueryRun,
+}
ALL_TOOLS_NAMES = {
**_BASE_TOOLS,
**_LLM_TOOLS, # type: ignore
@@ -18,4 +56,5 @@ ALL_TOOLS_NAMES = {
**{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()},
**CUSTOM_TOOLS,
**FILE_TOOLS, # type: ignore
+ **OTHER_TOOLS,
}
diff --git a/src/backend/langflow/interface/tools/util.py b/src/backend/langflow/interface/tools/util.py
index 8f8673b6b..f1d66696a 100644
--- a/src/backend/langflow/interface/tools/util.py
+++ b/src/backend/langflow/interface/tools/util.py
@@ -4,29 +4,6 @@ from typing import Dict, Union
from langchain.agents.tools import Tool
-from langflow.interface.tools.constants import ALL_TOOLS_NAMES
-
-
-def get_tools_dict():
- """Get the tools dictionary."""
-
- all_tools = {}
-
- for tool, fcn in ALL_TOOLS_NAMES.items():
- if tool_params := get_tool_params(fcn):
- tool_name = tool_params.get("name") or str(tool)
- all_tools[tool_name] = fcn
-
- return all_tools
-
-
-def get_tool_by_name(name: str):
- """Get a tool from the tools dictionary."""
- tools = get_tools_dict()
- if name not in tools:
- raise ValueError(f"{name} not found.")
- return tools[name]
-
def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
tree = ast.parse(inspect.getsource(func))
@@ -113,6 +90,8 @@ def get_tool_params(tool, **kwargs) -> Dict:
elif inspect.isclass(tool):
# Get the parameters necessary to
# instantiate the class
+
return get_class_tool_params(tool, **kwargs) or {}
+
else:
raise ValueError("Tool must be a function or class.")
diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py
index fd5d9ec3e..085537756 100644
--- a/src/backend/langflow/interface/types.py
+++ b/src/backend/langflow/interface/types.py
@@ -8,6 +8,7 @@ from langflow.interface.prompts.base import prompt_creator
from langflow.interface.text_splitters.base import textsplitter_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
+from langflow.interface.utilities.base import utility_creator
from langflow.interface.vector_store.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
@@ -42,6 +43,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
vectorstore_creator,
documentloader_creator,
textsplitter_creator,
+ utility_creator,
]
all_types = {}
diff --git a/src/backend/langflow/interface/utilities/__init__.py b/src/backend/langflow/interface/utilities/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/utilities/base.py b/src/backend/langflow/interface/utilities/base.py
new file mode 100644
index 000000000..e60e344ad
--- /dev/null
+++ b/src/backend/langflow/interface/utilities/base.py
@@ -0,0 +1,39 @@
+from typing import Dict, List, Optional
+
+from langflow.custom.customs import get_custom_nodes
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import utility_type_to_cls_dict
+from langflow.settings import settings
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class UtilityCreator(LangChainTypeCreator):
+ type_name: str = "utilities"
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ return utility_type_to_cls_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of a utility."""
+ try:
+ if name in get_custom_nodes(self.type_name).keys():
+ return get_custom_nodes(self.type_name)[name]
+ return build_template_from_class(name, utility_type_to_cls_dict)
+ except ValueError as exc:
+ raise ValueError(f"Utility {name} not found") from exc
+
+ except AttributeError as exc:
+ logger.error(f"Utility {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return [
+ utility.__name__
+ for utility in self.type_to_loader_dict.values()
+ if utility.__name__ in settings.utilities or settings.dev
+ ]
+
+
+utility_creator = UtilityCreator()
diff --git a/src/backend/langflow/settings.py b/src/backend/langflow/settings.py
index c5377c85a..48aa5939d 100644
--- a/src/backend/langflow/settings.py
+++ b/src/backend/langflow/settings.py
@@ -18,6 +18,7 @@ class Settings(BaseSettings):
wrappers: List[str] = []
toolkits: List[str] = []
textsplitters: List[str] = []
+ utilities: List[str] = []
dev: bool = False
class Config:
@@ -42,6 +43,7 @@ class Settings(BaseSettings):
self.wrappers = new_settings.wrappers or []
self.toolkits = new_settings.toolkits or []
self.textsplitters = new_settings.textsplitters or []
+ self.utilities = new_settings.utilities or []
self.dev = new_settings.dev or False
diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py
index 6ac026e59..f2e8bd94f 100644
--- a/src/backend/langflow/template/nodes.py
+++ b/src/backend/langflow/template/nodes.py
@@ -256,6 +256,29 @@ class CSVAgentNode(FrontendNode):
return super().to_dict()
+class SQLDatabaseNode(FrontendNode):
+ name: str = "SQLDatabase"
+ template: Template = Template(
+ type_name="sql_database",
+ fields=[
+ TemplateField(
+ field_type="str",
+ required=True,
+ is_list=False,
+ show=True,
+ multiline=False,
+ value="",
+ name="uri",
+ ),
+ ],
+ )
+ description: str = """SQLAlchemy wrapper around a database."""
+ base_classes: list[str] = ["SQLDatabase"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
class VectorStoreAgentNode(FrontendNode):
name: str = "VectorStoreAgent"
template: Template = Template(
diff --git a/src/frontend/src/utils.ts b/src/frontend/src/utils.ts
index 5ab47f31e..405d56297 100644
--- a/src/frontend/src/utils.ts
+++ b/src/frontend/src/utils.ts
@@ -13,7 +13,8 @@ import {
QuestionMarkCircleIcon,
FingerPrintIcon,
ScissorsIcon,
- CircleStackIcon
+ CircleStackIcon,
+ Squares2X2Icon
} from "@heroicons/react/24/outline";
import { Connection, Edge, Node, ReactFlowInstance } from "reactflow";
import { FlowType } from "./types/flow";
@@ -85,6 +86,7 @@ export const nodeColors: {[char: string]: string} = {
textsplitters: "#B47CB5",
toolkits:"#DB2C2C",
wrappers:"#E6277A",
+ utilities:"#31A3CC",
unknown:"#9CA3AF"
};
@@ -103,6 +105,7 @@ export const nodeNames:{[char: string]: string} = {
toolkits:"Toolkits",
wrappers:"Wrappers",
textsplitters: "Text Splitters",
+ utilities:"Utilities",
unknown:"Unknown"
};
@@ -121,6 +124,7 @@ export const nodeIcons:{[char: string]: React.ForwardRefExoticComponent