diff --git a/GCP_DEPLOYMENT.md b/GCP_DEPLOYMENT.md new file mode 100644 index 000000000..edb7e043c --- /dev/null +++ b/GCP_DEPLOYMENT.md @@ -0,0 +1,28 @@ +# Run Langflow from a New Google Cloud Project + +This guide will help you set up a Langflow development VM in a Google Cloud Platform project using Google Cloud Shell. + +> **Note**: When Cloud Shell opens, be sure to select **Trust repo**. Some `gcloud` commands might not run in an ephemeral Cloud Shell environment. + + +## Standard VM +[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md) + +This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment. +
+ +## Spot/Preemptible Instance + +[![Open in Cloud Shell - Spot Instance](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md) + +When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts. + +## Pricing (approximate) +> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator) +
+ +| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes | +| -------------- | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | ----- | +| 100 GB Disk | - | $10/month | - | $10/month | Disk cost remains the same for both regular and Spot/Preemptible VMs | +| VM (n1-standard-4) | $0.15/hr | ~$108/month | ~$0.04/hr | ~$29/month | The VM cost can be significantly reduced using a Spot/Preemptible instance | +| **Total** | **$0.15/hr** | **~$118/month** | **~$0.04/hr** | **~$39/month** | Total costs for running the VM and disk 24/7 for an entire month | diff --git a/README.md b/README.md index 970496349..3ae891751 100644 --- a/README.md +++ b/README.md @@ -19,14 +19,31 @@ LangFlow is a GUI for [LangChain](https://github.com/hwchase17/langchain), designed with [react-flow](https://github.com/wbkd/react-flow) to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chat box. ## 📦 Installation - +### Locally You can install LangFlow from pip: -`pip install langflow` +```shell +pip install langflow +``` Next, run: -`langflow` +```shell +python -m langflow +``` +or +```shell +langflow +``` + +### Deploy Langflow on Google Cloud Platform + +Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document. + +Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project. 
+ +[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md) + ## 🎨 Creating Flows diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..6d722a3cc --- /dev/null +++ b/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "reactFlow", + "lockfileVersion": 3, + "requires": true, + "packages": {} +} diff --git a/poetry.lock b/poetry.lock index 01f7dc4a6..5419d50c1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "aiohttp" @@ -3837,7 +3837,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] @@ -4016,6 +4016,10 @@ category = "main" optional = false python-versions = ">=3.8.0" files = [ + {file = "torch-2.0.0-1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c9090bda7d2eeeecd74f51b721420dbeb44f838d4536cc1b284e879417e3064a"}, + 
{file = "torch-2.0.0-1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:bd42db2a48a20574d2c33489e120e9f32789c4dc13c514b0c44272972d14a2d7"}, + {file = "torch-2.0.0-1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8969aa8375bcbc0c2993e7ede0a7f889df9515f18b9b548433f412affed478d9"}, + {file = "torch-2.0.0-1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ab2da16567cb55b67ae39e32d520d68ec736191d88ac79526ca5874754c32203"}, {file = "torch-2.0.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:7a9319a67294ef02459a19738bbfa8727bb5307b822dadd708bc2ccf6c901aca"}, {file = "torch-2.0.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9f01fe1f6263f31bd04e1757946fd63ad531ae37f28bb2dbf66f5c826ee089f4"}, {file = "torch-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:527f4ae68df7b8301ee6b1158ca56350282ea633686537b30dbb5d7b4a52622a"}, @@ -4233,6 +4237,15 @@ category = "main" optional = false python-versions = "*" files = [ + {file = "triton-2.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38806ee9663f4b0f7cd64790e96c579374089e58f49aac4a6608121aa55e2505"}, + {file = "triton-2.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:226941c7b8595219ddef59a1fdb821e8c744289a132415ddd584facedeb475b1"}, + {file = "triton-2.0.0-1-cp36-cp36m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4c9fc8c89874bc48eb7e7b2107a9b8d2c0bf139778637be5bfccb09191685cfd"}, + {file = "triton-2.0.0-1-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d2684b6a60b9f174f447f36f933e9a45f31db96cb723723ecd2dcfd1c57b778b"}, + {file = "triton-2.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9d4978298b74fcf59a75fe71e535c092b023088933b2f1df933ec32615e4beef"}, + {file = "triton-2.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:74f118c12b437fb2ca25e1a04759173b517582fcf4c7be11913316c764213656"}, + {file = 
"triton-2.0.0-1-pp37-pypy37_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9618815a8da1d9157514f08f855d9e9ff92e329cd81c0305003eb9ec25cc5add"}, + {file = "triton-2.0.0-1-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1aca3303629cd3136375b82cb9921727f804e47ebee27b2677fef23005c3851a"}, + {file = "triton-2.0.0-1-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e3e13aa8b527c9b642e3a9defcc0fbd8ffbe1c80d8ac8c15a01692478dc64d8a"}, {file = "triton-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f05a7e64e4ca0565535e3d5d3405d7e49f9d308505bb7773d21fb26a4c008c2"}, {file = "triton-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4b99ca3c6844066e516658541d876c28a5f6e3a852286bbc97ad57134827fd"}, {file = "triton-2.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47b4d70dc92fb40af553b4460492c31dc7d3a114a979ffb7a5cdedb7eb546c08"}, diff --git a/scripts/deploy_langflow_gcp.sh b/scripts/deploy_langflow_gcp.sh new file mode 100644 index 000000000..2c3dc0420 --- /dev/null +++ b/scripts/deploy_langflow_gcp.sh @@ -0,0 +1,89 @@ +# Set the VM, image, and networking configuration +VM_NAME="langflow-dev" +IMAGE_FAMILY="debian-11" +IMAGE_PROJECT="debian-cloud" +BOOT_DISK_SIZE="100GB" +ZONE="us-central1-a" +REGION="us-central1" +VPC_NAME="default" +SUBNET_NAME="default" +SUBNET_RANGE="10.128.0.0/20" +NAT_GATEWAY_NAME="nat-gateway" +CLOUD_ROUTER_NAME="nat-client" + +# Set the GCP project's compute region +gcloud config set compute/region $REGION + +# Check if the VPC exists, and create it if not +vpc_exists=$(gcloud compute networks list --filter="name=$VPC_NAME" --format="value(name)") +if [[ -z "$vpc_exists" ]]; then + gcloud compute networks create $VPC_NAME --subnet-mode=custom +fi + +# Check if the subnet exists, and create it if not +subnet_exists=$(gcloud compute networks subnets list 
--filter="name=$SUBNET_NAME AND region=$REGION" --format="value(name)") +if [[ -z "$subnet_exists" ]]; then + gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE +fi + +# Create a firewall rule to allow TCP port 8080 for all instances in the VPC +firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)") +if [[ -z "$firewall_8080_exists" ]]; then + gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS +fi + +# Create a firewall rule to allow IAP traffic +firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)") +if [[ -z "$firewall_iap_exists" ]]; then + gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS +fi + +# Define the startup script as a multiline Bash here-doc +STARTUP_SCRIPT=$(cat <<'EOF' +#!/bin/bash + +# Update and upgrade the system +apt -y update +apt -y upgrade + +# Install Python 3 pip, Langflow, and Nginx +apt -y install python3-pip +pip install langflow +apt-get -y install nginx + +# Configure Nginx for Langflow +touch /etc/nginx/sites-available/langflow-app +echo "server { + listen 0.0.0.0:8080; + + location / { + proxy_pass http://127.0.0.1:7860; + proxy_set_header Host "\$host"; + proxy_set_header X-Real-IP "\$remote_addr"; + proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for"; + } +}" >> /etc/nginx/sites-available/langflow-app +ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/ +sudo nginx -t +sudo systemctl restart nginx +langflow +EOF +) + +# Create a temporary file to store the startup script +tempfile=$(mktemp) +echo "$STARTUP_SCRIPT" > $tempfile + +# Create the VM instance with the specified configuration and startup script +gcloud compute instances create $VM_NAME \ + 
--image-family $IMAGE_FAMILY \ + --image-project $IMAGE_PROJECT \ + --boot-disk-size $BOOT_DISK_SIZE \ + --machine-type=n1-standard-4 \ + --metadata-from-file startup-script=$tempfile \ + --zone $ZONE \ + --network $VPC_NAME \ + --subnet $SUBNET_NAME + +# Remove the temporary file after the VM is created +rm $tempfile diff --git a/scripts/deploy_langflow_gcp_spot.sh b/scripts/deploy_langflow_gcp_spot.sh new file mode 100644 index 000000000..065b6013f --- /dev/null +++ b/scripts/deploy_langflow_gcp_spot.sh @@ -0,0 +1,90 @@ +# Set the VM, image, and networking configuration +VM_NAME="langflow-dev" +IMAGE_FAMILY="debian-11" +IMAGE_PROJECT="debian-cloud" +BOOT_DISK_SIZE="100GB" +ZONE="us-central1-a" +REGION="us-central1" +VPC_NAME="default" +SUBNET_NAME="default" +SUBNET_RANGE="10.128.0.0/20" +NAT_GATEWAY_NAME="nat-gateway" +CLOUD_ROUTER_NAME="nat-client" + +# Set the GCP project's compute region +gcloud config set compute/region $REGION + +# Check if the VPC exists, and create it if not +vpc_exists=$(gcloud compute networks list --filter="name=$VPC_NAME" --format="value(name)") +if [[ -z "$vpc_exists" ]]; then + gcloud compute networks create $VPC_NAME --subnet-mode=custom +fi + +# Check if the subnet exists, and create it if not +subnet_exists=$(gcloud compute networks subnets list --filter="name=$SUBNET_NAME AND region=$REGION" --format="value(name)") +if [[ -z "$subnet_exists" ]]; then + gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE +fi + +# Create a firewall rule to allow TCP port 8080 for all instances in the VPC +firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)") +if [[ -z "$firewall_8080_exists" ]]; then + gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS +fi + +# Create a firewall rule to allow IAP traffic +firewall_iap_exists=$(gcloud compute 
firewall-rules list --filter="name=allow-iap" --format="value(name)") +if [[ -z "$firewall_iap_exists" ]]; then + gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS +fi + +# Define the startup script as a multiline Bash here-doc +STARTUP_SCRIPT=$(cat <<'EOF' +#!/bin/bash + +# Update and upgrade the system +apt -y update +apt -y upgrade + +# Install Python 3 pip, Langflow, and Nginx +apt -y install python3-pip +pip install langflow +apt-get -y install nginx + +# Configure Nginx for Langflow +touch /etc/nginx/sites-available/langflow-app +echo "server { + listen 0.0.0.0:8080; + + location / { + proxy_pass http://127.0.0.1:7860; + proxy_set_header Host "\$host"; + proxy_set_header X-Real-IP "\$remote_addr"; + proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for"; + } +}" >> /etc/nginx/sites-available/langflow-app +ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/ +sudo nginx -t +sudo systemctl restart nginx +langflow +EOF +) + +# Create a temporary file to store the startup script +tempfile=$(mktemp) +echo "$STARTUP_SCRIPT" > $tempfile + +# Create the VM instance with the specified configuration and startup script +gcloud compute instances create $VM_NAME \ + --image-family $IMAGE_FAMILY \ + --image-project $IMAGE_PROJECT \ + --boot-disk-size $BOOT_DISK_SIZE \ + --machine-type=n1-standard-4 \ + --metadata-from-file startup-script=$tempfile \ + --zone $ZONE \ + --network $VPC_NAME \ + --subnet $SUBNET_NAME \ + --preemptible + +# Remove the temporary file after the VM is created +rm $tempfile diff --git a/scripts/walkthroughtutorial.md b/scripts/walkthroughtutorial.md new file mode 100644 index 000000000..fa6e3c11d --- /dev/null +++ b/scripts/walkthroughtutorial.md @@ -0,0 +1,86 @@ +# Deploy Langflow on Google Cloud Platform + +**Duration**: 45 minutes +**Author**: [Robert Wilkins III](https://www.linkedin.com/in/robertwilkinsiii) + +## Introduction + 
+In this tutorial, you will learn how to deploy Langflow on [Google Cloud Platform](https://cloud.google.com/) (GCP) using Google Cloud Shell. + +This tutorial assumes you have a GCP account and basic knowledge of Google Cloud Shell. If you're not familiar with Cloud Shell, you can review the [Cloud Shell documentation](https://cloud.google.com/shell/docs). + +## Set up your environment + +Before you start, make sure you have the following prerequisites: + +- A GCP account with the necessary permissions to create resources +- A project on GCP where you want to deploy Langflow + +[**Select your GCP project**] + + + +In the next step, you'll configure the GCP environment and deploy Langflow. + +## Configure the GCP environment and deploy Langflow +Run the deploy_langflow_gcp.sh script to configure the GCP environment and deploy Langflow: + +```sh +gcloud config set project +bash ./deploy_langflow_gcp.sh +``` + +The script will: + +1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed +2. Create a startup script to install Python, Langflow, and Nginx +3. Create a Compute Engine VM instance with the specified configuration and startup script +4. Configure Nginx to serve Langflow on TCP port 8080 + + +> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished. + +In the next step, you'll learn how to connect to the Langflow VM. + +## Connect to the Langflow VM +To connect to your new Langflow VM, follow these steps: + +1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080 +
**or** +2. Run the following command to display the URL for your Langflow environment: +```bash +export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)") + +echo http://$LANGFLOW_IP:8080 +``` + +3. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment + +Congratulations! You have successfully deployed Langflow on Google Cloud Platform. + + + +## Cleanup +If you want to remove the resources created during this tutorial, you can use the following commands: + +```sh +gcloud compute instances delete langflow-dev --zone us-central1-a --quiet +``` +The following network settings and services are used during this walkthrough. If you plan to continue using the project after the walkthrough, you may keep these configurations in place. + +However, if you decide to remove them after completing the walkthrough, you can use the following gcloud commands: + + +> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings. + +``` +gcloud compute firewall-rules delete allow-tcp-8080 --quiet + +gcloud compute firewall-rules delete allow-iap --quiet + +gcloud compute networks subnets delete default --region us-central1 --quiet + +gcloud compute networks delete default --quiet +``` diff --git a/scripts/walkthroughtutorial_spot.md b/scripts/walkthroughtutorial_spot.md new file mode 100644 index 000000000..751f03d78 --- /dev/null +++ b/scripts/walkthroughtutorial_spot.md @@ -0,0 +1,83 @@ +# Deploy Langflow on Google Cloud Platform + +**Duration**: 45 minutes +**Author**: [Robert Wilkins III](https://www.linkedin.com/in/robertwilkinsiii) + +## Introduction + +In this tutorial, you will learn how to deploy Langflow on [Google Cloud Platform](https://cloud.google.com/) (GCP) using Google Cloud Shell. + +This tutorial assumes you have a GCP account and basic knowledge of Google Cloud Shell. 
If you're not familiar with Cloud Shell, you can review the [Cloud Shell documentation](https://cloud.google.com/shell/docs). + +## Set up your environment + +Before you start, make sure you have the following prerequisites: + +- A GCP account with the necessary permissions to create resources +- A project on GCP where you want to deploy Langflow + +[**Select your GCP project**] + + + +In the next step, you'll configure the GCP environment and deploy Langflow. + +## Configure the GCP environment and deploy Langflow +Run the deploy_langflow_gcp_spot.sh script to configure the GCP environment and deploy Langflow: + +```sh +gcloud config set project +bash ./deploy_langflow_gcp_spot.sh +``` + +The script will: + +1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed +2. Create a startup script to install Python, Langflow, and Nginx +3. Create a Compute Engine VM instance with the specified configuration and startup script +4. Configure Nginx to serve Langflow on TCP port 8080 + +> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished. + +In the next step, you'll learn how to connect to the Langflow VM. + +## Connect to the Langflow VM +To connect to your new Langflow VM, follow these steps: + +1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080 +
**or** +2. Run the following command to display the URL for your Langflow environment: +```bash +export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)") + +echo http://$LANGFLOW_IP:8080 +``` + +3. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment + +Congratulations! You have successfully deployed Langflow on Google Cloud Platform. + + + +## Cleanup +If you want to remove the resources created during this tutorial, you can use the following commands: + +```sh +gcloud compute instances delete langflow-dev --zone us-central1-a --quiet +``` +The following network settings and services are used during this walkthrough. If you plan to continue using the project after the walkthrough, you may keep these configurations in place. + +However, if you decide to remove them after completing the walkthrough, you can use the following gcloud commands: +> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings. 
+ +``` +gcloud compute firewall-rules delete allow-tcp-8080 --quiet + +gcloud compute firewall-rules delete allow-iap --quiet + +gcloud compute networks subnets delete default --region us-central1 --quiet + +gcloud compute networks delete default --quiet +``` diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 7f00167b4..9236d5996 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -6,6 +6,7 @@ chains: - SeriesCharacterChain - MidJourneyPromptChain - TimeTravelGuideChain + - SQLDatabaseChain agents: - ZeroShotAgent @@ -40,6 +41,27 @@ tools: - Tool - PythonFunction - JsonSpec + - News API + - TMDB API + - Podcast API + - QuerySQLDataBaseTool + - InfoSQLDatabaseTool + - ListSQLDatabaseTool + # - QueryCheckerTool + - BingSearchRun + - GoogleSearchRun + - GoogleSearchResults + - JsonListKeysTool + - JsonGetValueTool + - PythonREPLTool + - PythonAstREPLTool + - RequestsGetTool + - RequestsPostTool + - RequestsPatchTool + - RequestsPutTool + - RequestsDeleteTool + - WikipediaQueryRun + - WolframAlphaQueryRun wrappers: - RequestsWrapper @@ -91,4 +113,16 @@ documentloaders: textsplitters: - CharacterTextSplitter +utilities: + - BingSearchAPIWrapper + - GoogleSearchAPIWrapper + - GoogleSerperAPIWrapper + - SearxResults + - SearxSearchWrapper + - SerpAPIWrapper + - WikipediaAPIWrapper + - WolframAlphaAPIWrapper + # - ZapierNLAWrapper + - SQLDatabase + dev: false diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py index e77b81ec6..d45221be7 100644 --- a/src/backend/langflow/custom/customs.py +++ b/src/backend/langflow/custom/customs.py @@ -12,6 +12,9 @@ CUSTOM_NODES = { "VectorStoreRouterAgent": nodes.VectorStoreRouterAgentNode(), "SQLAgent": nodes.SQLAgentNode(), }, + "utilities": { + "SQLDatabase": nodes.SQLDatabaseNode(), + }, } diff --git a/src/backend/langflow/graph/base.py b/src/backend/langflow/graph/base.py index a4e6725da..ae82d68d1 100644 --- 
a/src/backend/langflow/graph/base.py +++ b/src/backend/langflow/graph/base.py @@ -211,7 +211,11 @@ class Node: "VectorStoreRouterAgent", "VectorStoreAgent", "VectorStoreInfo", - ] or self.node_type in ["VectorStoreInfo", "VectorStoreRouterToolkit"]: + ] or self.node_type in [ + "VectorStoreInfo", + "VectorStoreRouterToolkit", + "SQLDatabase", + ]: return self._built_object return deepcopy(self._built_object) diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py index 7296a0c0d..018174334 100644 --- a/src/backend/langflow/graph/nodes.py +++ b/src/backend/langflow/graph/nodes.py @@ -101,6 +101,10 @@ class ChainNode(Node): self.params[key] = value.build(tools=tools, force=force) self._build() + + #! Cannot deepcopy SQLDatabaseChain + if self.node_type in ["SQLDatabaseChain"]: + return self._built_object return deepcopy(self._built_object) diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py index 2ffc18b14..f07b03f04 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/langflow/interface/custom_lists.py @@ -9,10 +9,12 @@ from langchain import ( memory, requests, text_splitter, + utilities, vectorstores, ) from langchain.agents import agent_toolkits from langchain.chat_models import ChatOpenAI +from langchain.sql_database import SQLDatabase from langflow.interface.importing.utils import import_class @@ -76,3 +78,9 @@ documentloaders_type_to_cls_dict: dict[str, Any] = { textsplitter_type_to_cls_dict: dict[str, Any] = dict( inspect.getmembers(text_splitter, inspect.isclass) ) + +## Utilities +utility_type_to_cls_dict: dict[str, Any] = dict( + inspect.getmembers(utilities, inspect.isclass) +) +utility_type_to_cls_dict["SQLDatabase"] = SQLDatabase diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index a3480928e..e303da0eb 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ 
b/src/backend/langflow/interface/importing/utils.py @@ -10,7 +10,7 @@ from langchain.chat_models.base import BaseChatModel from langchain.llms.base import BaseLLM from langchain.tools import BaseTool -from langflow.interface.tools.util import get_tool_by_name +from langflow.interface.tools.base import tool_creator def import_module(module_path: str) -> Any: @@ -44,6 +44,7 @@ def import_by_type(_type: str, name: str) -> Any: "vectorstores": import_vectorstore, "documentloaders": import_documentloader, "textsplitters": import_textsplitter, + "utilities": import_utility, } if _type == "llms": key = "chat" if "chat" in name.lower() else "llm" @@ -107,7 +108,7 @@ def import_llm(llm: str) -> BaseLLM: def import_tool(tool: str) -> BaseTool: """Import tool from tool name""" - return get_tool_by_name(tool) + return tool_creator.type_to_loader_dict[tool]["fcn"] def import_chain(chain: str) -> Type[Chain]: @@ -131,10 +132,16 @@ def import_vectorstore(vectorstore: str) -> Any: def import_documentloader(documentloader: str) -> Any: """Import documentloader from documentloader name""" - return import_class(f"langchain.document_loaders.{documentloader}") def import_textsplitter(textsplitter: str) -> Any: """Import textsplitter from textsplitter name""" return import_class(f"langchain.text_splitter.{textsplitter}") + + +def import_utility(utility: str) -> Any: + """Import utility from utility name""" + if utility == "SQLDatabase": + return import_class(f"langchain.sql_database.{utility}") + return import_class(f"langchain.utilities.{utility}") diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py index cf45fd9c5..3d73105c2 100644 --- a/src/backend/langflow/interface/listing.py +++ b/src/backend/langflow/interface/listing.py @@ -8,6 +8,7 @@ from langflow.interface.prompts.base import prompt_creator from langflow.interface.text_splitters.base import textsplitter_creator from langflow.interface.toolkits.base import toolkits_creator from 
langflow.interface.tools.base import tool_creator +from langflow.interface.utilities.base import utility_creator from langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator @@ -26,6 +27,7 @@ def get_type_dict(): "vectorStore": vectorstore_creator.to_list(), "embeddings": embedding_creator.to_list(), "textSplitters": textsplitter_creator.to_list(), + "utilities": utility_creator.to_list(), } diff --git a/src/backend/langflow/interface/loading.py b/src/backend/langflow/interface/loading.py index 11db47ee6..4cf702ea2 100644 --- a/src/backend/langflow/interface/loading.py +++ b/src/backend/langflow/interface/loading.py @@ -68,6 +68,13 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any: params.pop("model") return class_object(**params) elif base_type == "vectorstores": + if len(params.get("documents", [])) == 0: + # Error when the pdf or other source was not correctly + # loaded. + raise ValueError( + "The source you provided did not load correctly or was empty." + "This may cause an error in the vectorstore." 
+ ) return class_object.from_documents(**params) elif base_type == "documentloaders": return class_object(**params).load() @@ -75,16 +82,19 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any: documents = params.pop("documents") text_splitter = class_object(**params) return text_splitter.split_documents(documents) + elif base_type == "utilities": + if node_type == "SQLDatabase": + return class_object.from_uri(params.pop("uri")) return class_object(**params) -def load_flow_from_json(path: str): +def load_flow_from_json(path: str, build=True): # This is done to avoid circular imports from langflow.graph import Graph """Load flow from json file""" - with open(path, "r") as f: + with open(path, "r", encoding="utf-8") as f: flow_graph = json.load(f) data_graph = flow_graph["data"] nodes = data_graph["nodes"] @@ -96,7 +106,7 @@ def load_flow_from_json(path: str): # Nodes, edges and root node edges = data_graph["edges"] graph = Graph(nodes, edges) - return graph.build() + return graph.build() if build else graph def replace_zero_shot_prompt_with_prompt_template(nodes): diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 110d3827f..5fb4f0045 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -1,6 +1,7 @@ import contextlib import io from typing import Any, Dict +from chromadb.errors import NotEnoughElementsException from langflow.cache.base import compute_dict_hash, load_cache, memoize_dict from langflow.graph.graph import Graph @@ -230,6 +231,10 @@ def get_result_and_steps(langchain_object, message: str): else: thought = output_buffer.getvalue() + except NotEnoughElementsException as exc: + raise ValueError( + "Error: Not enough documents for ChromaDB to index. Try reducing chunk size in TextSplitter." 
+ ) from exc except Exception as exc: raise ValueError(f"Error: {str(exc)}") from exc return result, thought diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py index 6a3439bf0..5fd0c72f0 100644 --- a/src/backend/langflow/interface/tools/base.py +++ b/src/backend/langflow/interface/tools/base.py @@ -1,7 +1,6 @@ from typing import Dict, List, Optional from langchain.agents.load_tools import ( - _BASE_TOOLS, _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS, @@ -10,17 +9,16 @@ from langchain.agents.load_tools import ( from langflow.custom import customs from langflow.interface.base import LangChainTypeCreator from langflow.interface.tools.constants import ( + ALL_TOOLS_NAMES, CUSTOM_TOOLS, FILE_TOOLS, + OTHER_TOOLS, ) -from langflow.interface.tools.util import ( - get_tool_by_name, - get_tool_params, - get_tools_dict, -) +from langflow.interface.tools.util import get_tool_params from langflow.settings import settings from langflow.template.base import Template, TemplateField from langflow.utils import util +from langflow.utils.util import build_template_from_class TOOL_INPUTS = { "str": TemplateField( @@ -66,64 +64,81 @@ class ToolCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: if self.tools_dict is None: - self.tools_dict = get_tools_dict() + all_tools = {} + for tool, tool_fcn in ALL_TOOLS_NAMES.items(): + tool_params = get_tool_params(tool_fcn) + tool_name = tool_params.get("name", tool) + + if tool_name in settings.tools or settings.dev: + if tool_name == "JsonSpec": + tool_params["path"] = tool_params.pop("dict_") # type: ignore + all_tools[tool_name] = { + "type": tool, + "params": tool_params, + "fcn": tool_fcn, + } + + self.tools_dict = all_tools + return self.tools_dict def get_signature(self, name: str) -> Optional[Dict]: """Get the signature of a tool.""" base_classes = ["Tool"] - all_tools = {} - for tool in self.type_to_loader_dict.keys(): - tool_fcn = 
get_tool_by_name(tool) - if tool_params := get_tool_params(tool_fcn): - tool_name = tool_params.get("name") or str(tool) - all_tools[tool_name] = { - "type": tool, - "params": tool_params, - "fcn": tool_fcn, - } + fields = [] + params = [] + tool_params = {} # Raise error if name is not in tools - if name not in all_tools.keys(): + if name not in self.type_to_loader_dict.keys(): raise ValueError("Tool not found") - tool_type: str = all_tools[name]["type"] # type: ignore + tool_type: str = self.type_to_loader_dict[name]["type"] # type: ignore - if all_tools[tool_type]["fcn"] in _BASE_TOOLS.values(): - params = [] - elif all_tools[tool_type]["fcn"] in _LLM_TOOLS.values(): + # if tool_type in _BASE_TOOLS.keys(): + # params = [] + if tool_type in _LLM_TOOLS.keys(): params = ["llm"] - elif all_tools[tool_type]["fcn"] in [ - val[0] for val in _EXTRA_LLM_TOOLS.values() - ]: - n_dict = {val[0]: val[1] for val in _EXTRA_LLM_TOOLS.values()} - extra_keys = n_dict[all_tools[tool_type]["fcn"]] + elif tool_type in _EXTRA_LLM_TOOLS.keys(): + extra_keys = _EXTRA_LLM_TOOLS[tool_type][1] params = ["llm"] + extra_keys - elif all_tools[tool_type]["fcn"] in [ - val[0] for val in _EXTRA_OPTIONAL_TOOLS.values() - ]: - n_dict = {val[0]: val[1] for val in _EXTRA_OPTIONAL_TOOLS.values()} # type: ignore - extra_keys = n_dict[all_tools[tool_type]["fcn"]] + elif tool_type in _EXTRA_OPTIONAL_TOOLS.keys(): + extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type][1] params = extra_keys # elif tool_type == "Tool": # params = ["name", "description", "func"] elif tool_type in CUSTOM_TOOLS: # Get custom tool params - params = all_tools[name]["params"] # type: ignore + params = self.type_to_loader_dict[name]["params"] # type: ignore base_classes = ["function"] if node := customs.get_custom_nodes("tools").get(tool_type): return node elif tool_type in FILE_TOOLS: - params = all_tools[name]["params"] # type: ignore - if tool_type == "JsonSpec": - params["path"] = params.pop("dict_") # type: ignore + params = 
self.type_to_loader_dict[name]["params"] # type: ignore base_classes += [name] - else: - params = [] + elif tool_type in OTHER_TOOLS: + print(tool_type) + tool_dict = build_template_from_class(tool_type, OTHER_TOOLS) + fields = tool_dict["template"] + + # Pop unnecessary fields and add name + fields.pop("_type") # type: ignore + fields.pop("return_direct") # type: ignore + fields.pop("verbose") # type: ignore + + tool_params = { + "name": fields.pop("name")["value"], # type: ignore + "description": fields.pop("description")["value"], # type: ignore + } + + fields = [ + TemplateField(name=name, field_type=field["type"], **field) + for name, field in fields.items() # type: ignore + ] + base_classes += tool_dict["base_classes"] # Copy the field and add the name - fields = [] for param in params: field = TOOL_INPUTS.get(param, TOOL_INPUTS["str"]).copy() field.name = param @@ -134,7 +149,7 @@ class ToolCreator(LangChainTypeCreator): template = Template(fields=fields, type_name=tool_type) - tool_params = all_tools[name]["params"] + tool_params = {**tool_params, **self.type_to_loader_dict[name]["params"]} return { "template": util.format_dict(template.to_dict()), **tool_params, @@ -144,21 +159,7 @@ class ToolCreator(LangChainTypeCreator): def to_list(self) -> List[str]: """List all load tools""" - tools = [] - - for tool, fcn in get_tools_dict().items(): - tool_params = get_tool_params(fcn) - - if tool_params and not tool_params.get("name"): - tool_params["name"] = tool - - if tool_params and ( - tool_params.get("name") in settings.tools - or (tool_params.get("name") and settings.dev) - ): - tools.append(tool_params["name"]) - - return tools + return list(self.type_to_loader_dict.keys()) tool_creator = ToolCreator() diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py index 2ec20cef9..34890a684 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/langflow/interface/tools/constants.py 
@@ -5,12 +5,50 @@ from langchain.agents.load_tools import ( _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS, ) -from langchain.tools.json.tool import JsonSpec +from langchain.tools.bing_search.tool import BingSearchRun +from langchain.tools.google_search.tool import GoogleSearchResults, GoogleSearchRun +from langchain.tools.json.tool import JsonGetValueTool, JsonListKeysTool, JsonSpec +from langchain.tools.python.tool import PythonAstREPLTool, PythonREPLTool +from langchain.tools.requests.tool import ( + RequestsDeleteTool, + RequestsGetTool, + RequestsPatchTool, + RequestsPostTool, + RequestsPutTool, +) +from langchain.tools.sql_database.tool import ( + InfoSQLDatabaseTool, + ListSQLDatabaseTool, + QueryCheckerTool, + QuerySQLDataBaseTool, +) +from langchain.tools.wikipedia.tool import WikipediaQueryRun +from langchain.tools.wolfram_alpha.tool import WolframAlphaQueryRun from langflow.interface.tools.custom import PythonFunction FILE_TOOLS = {"JsonSpec": JsonSpec} CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction} +OTHER_TOOLS = { + "QuerySQLDataBaseTool": QuerySQLDataBaseTool, + "InfoSQLDatabaseTool": InfoSQLDatabaseTool, + "ListSQLDatabaseTool": ListSQLDatabaseTool, + "QueryCheckerTool": QueryCheckerTool, + "BingSearchRun": BingSearchRun, + "GoogleSearchRun": GoogleSearchRun, + "GoogleSearchResults": GoogleSearchResults, + "JsonListKeysTool": JsonListKeysTool, + "JsonGetValueTool": JsonGetValueTool, + "PythonREPLTool": PythonREPLTool, + "PythonAstREPLTool": PythonAstREPLTool, + "RequestsGetTool": RequestsGetTool, + "RequestsPostTool": RequestsPostTool, + "RequestsPatchTool": RequestsPatchTool, + "RequestsPutTool": RequestsPutTool, + "RequestsDeleteTool": RequestsDeleteTool, + "WikipediaQueryRun": WikipediaQueryRun, + "WolframAlphaQueryRun": WolframAlphaQueryRun, +} ALL_TOOLS_NAMES = { **_BASE_TOOLS, **_LLM_TOOLS, # type: ignore @@ -18,4 +56,5 @@ ALL_TOOLS_NAMES = { **{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()}, **CUSTOM_TOOLS, **FILE_TOOLS, # type: ignore 
+ **OTHER_TOOLS, } diff --git a/src/backend/langflow/interface/tools/util.py b/src/backend/langflow/interface/tools/util.py index 8f8673b6b..f1d66696a 100644 --- a/src/backend/langflow/interface/tools/util.py +++ b/src/backend/langflow/interface/tools/util.py @@ -4,29 +4,6 @@ from typing import Dict, Union from langchain.agents.tools import Tool -from langflow.interface.tools.constants import ALL_TOOLS_NAMES - - -def get_tools_dict(): - """Get the tools dictionary.""" - - all_tools = {} - - for tool, fcn in ALL_TOOLS_NAMES.items(): - if tool_params := get_tool_params(fcn): - tool_name = tool_params.get("name") or str(tool) - all_tools[tool_name] = fcn - - return all_tools - - -def get_tool_by_name(name: str): - """Get a tool from the tools dictionary.""" - tools = get_tools_dict() - if name not in tools: - raise ValueError(f"{name} not found.") - return tools[name] - def get_func_tool_params(func, **kwargs) -> Union[Dict, None]: tree = ast.parse(inspect.getsource(func)) @@ -113,6 +90,8 @@ def get_tool_params(tool, **kwargs) -> Dict: elif inspect.isclass(tool): # Get the parameters necessary to # instantiate the class + return get_class_tool_params(tool, **kwargs) or {} + else: raise ValueError("Tool must be a function or class.") diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index fd5d9ec3e..085537756 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -8,6 +8,7 @@ from langflow.interface.prompts.base import prompt_creator from langflow.interface.text_splitters.base import textsplitter_creator from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.tools.base import tool_creator +from langflow.interface.utilities.base import utility_creator from langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator @@ -42,6 +43,7 @@ def build_langchain_types_dict(): # sourcery skip: 
dict-assign-update-to-union vectorstore_creator, documentloader_creator, textsplitter_creator, + utility_creator, ] all_types = {} diff --git a/src/backend/langflow/interface/utilities/__init__.py b/src/backend/langflow/interface/utilities/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/interface/utilities/base.py b/src/backend/langflow/interface/utilities/base.py new file mode 100644 index 000000000..e60e344ad --- /dev/null +++ b/src/backend/langflow/interface/utilities/base.py @@ -0,0 +1,39 @@ +from typing import Dict, List, Optional + +from langflow.custom.customs import get_custom_nodes +from langflow.interface.base import LangChainTypeCreator +from langflow.interface.custom_lists import utility_type_to_cls_dict +from langflow.settings import settings +from langflow.utils.logger import logger +from langflow.utils.util import build_template_from_class + + +class UtilityCreator(LangChainTypeCreator): + type_name: str = "utilities" + + @property + def type_to_loader_dict(self) -> Dict: + return utility_type_to_cls_dict + + def get_signature(self, name: str) -> Optional[Dict]: + """Get the signature of a utility.""" + try: + if name in get_custom_nodes(self.type_name).keys(): + return get_custom_nodes(self.type_name)[name] + return build_template_from_class(name, utility_type_to_cls_dict) + except ValueError as exc: + raise ValueError(f"Utility {name} not found") from exc + + except AttributeError as exc: + logger.error(f"Utility {name} not loaded: {exc}") + return None + + def to_list(self) -> List[str]: + return [ + utility.__name__ + for utility in self.type_to_loader_dict.values() + if utility.__name__ in settings.utilities or settings.dev + ] + + +utility_creator = UtilityCreator() diff --git a/src/backend/langflow/settings.py b/src/backend/langflow/settings.py index c5377c85a..48aa5939d 100644 --- a/src/backend/langflow/settings.py +++ b/src/backend/langflow/settings.py @@ -18,6 +18,7 @@ class 
Settings(BaseSettings): wrappers: List[str] = [] toolkits: List[str] = [] textsplitters: List[str] = [] + utilities: List[str] = [] dev: bool = False class Config: @@ -42,6 +43,7 @@ class Settings(BaseSettings): self.wrappers = new_settings.wrappers or [] self.toolkits = new_settings.toolkits or [] self.textsplitters = new_settings.textsplitters or [] + self.utilities = new_settings.utilities or [] self.dev = new_settings.dev or False diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py index 6ac026e59..f2e8bd94f 100644 --- a/src/backend/langflow/template/nodes.py +++ b/src/backend/langflow/template/nodes.py @@ -256,6 +256,29 @@ class CSVAgentNode(FrontendNode): return super().to_dict() +class SQLDatabaseNode(FrontendNode): + name: str = "SQLDatabase" + template: Template = Template( + type_name="sql_database", + fields=[ + TemplateField( + field_type="str", + required=True, + is_list=False, + show=True, + multiline=False, + value="", + name="uri", + ), + ], + ) + description: str = """SQLAlchemy wrapper around a database.""" + base_classes: list[str] = ["SQLDatabase"] + + def to_dict(self): + return super().to_dict() + + class VectorStoreAgentNode(FrontendNode): name: str = "VectorStoreAgent" template: Template = Template( diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json index df61239c5..8491f6873 100644 --- a/src/frontend/package-lock.json +++ b/src/frontend/package-lock.json @@ -14,6 +14,7 @@ "@heroicons/react": "^2.0.15", "@mui/material": "^5.11.9", "@tailwindcss/forms": "^0.5.3", + "@tailwindcss/line-clamp": "^0.4.4", "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", @@ -30,7 +31,7 @@ "react-cookie": "^4.1.1", "react-dom": "^18.2.0", "react-error-boundary": "^4.0.2", - "react-icons": "^4.7.1", + "react-icons": "^4.8.0", "react-laag": "^2.0.5", "react-router-dom": "^6.8.1", "react-scripts": "5.0.1", @@ -3930,6 +3931,14 @@ 
"tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1" } }, + "node_modules/@tailwindcss/line-clamp": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@tailwindcss/line-clamp/-/line-clamp-0.4.4.tgz", + "integrity": "sha512-5U6SY5z8N42VtrCrKlsTAA35gy2VSyYtHWCsg1H87NU1SXnEfekTVlrga9fzUDrrHcGi2Lb5KenUWb4lRQT5/g==", + "peerDependencies": { + "tailwindcss": ">=2.0.0 || >=3.0.0 || >=3.0.0-alpha.1" + } + }, "node_modules/@testing-library/dom": { "version": "8.20.0", "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.20.0.tgz", @@ -15031,9 +15040,9 @@ "integrity": "sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg==" }, "node_modules/react-icons": { - "version": "4.7.1", - "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.7.1.tgz", - "integrity": "sha512-yHd3oKGMgm7zxo3EA7H2n7vxSoiGmHk5t6Ou4bXsfcgWyhfDKMpyKfhHR6Bjnn63c+YXBLBPUql9H4wPJM6sXw==", + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.8.0.tgz", + "integrity": "sha512-N6+kOLcihDiAnj5Czu637waJqSnwlMNROzVZMhfX68V/9bu9qHaMIJC4UdozWoOk57gahFCNHwVvWzm0MTzRjg==", "peerDependencies": { "react": "*" } diff --git a/src/frontend/package.json b/src/frontend/package.json index b669569e3..feb11e814 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -9,6 +9,7 @@ "@heroicons/react": "^2.0.15", "@mui/material": "^5.11.9", "@tailwindcss/forms": "^0.5.3", + "@tailwindcss/line-clamp": "^0.4.4", "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", @@ -25,7 +26,7 @@ "react-cookie": "^4.1.1", "react-dom": "^18.2.0", "react-error-boundary": "^4.0.2", - "react-icons": "^4.7.1", + "react-icons": "^4.8.0", "react-laag": "^2.0.5", "react-router-dom": "^6.8.1", "react-scripts": "5.0.1", @@ -60,4 +61,4 @@ ] }, "proxy": "http://backend:7860" -} \ No newline at end of file +} diff --git 
a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index ff13af901..f362ca8ff 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -38,7 +38,7 @@ export default function GenericNode({
diff --git a/src/frontend/src/assets/Gooey Ring-5s-271px.svg b/src/frontend/src/assets/Gooey Ring-5s-271px.svg new file mode 100644 index 000000000..6c3433420 --- /dev/null +++ b/src/frontend/src/assets/Gooey Ring-5s-271px.svg @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/frontend/src/components/TooltipComponent/index.tsx b/src/frontend/src/components/TooltipComponent/index.tsx index a111d047b..65c8d0e5f 100644 --- a/src/frontend/src/components/TooltipComponent/index.tsx +++ b/src/frontend/src/components/TooltipComponent/index.tsx @@ -1,6 +1,7 @@ import { ReactElement } from "react"; import { LightTooltip } from "../LightTooltipComponent"; +import { TooltipComponentType } from "../../types/components"; -export default function Tooltip({ children, title }:{children:ReactElement,title:string}) { - return {children}; +export default function Tooltip({ children, title,placement }:TooltipComponentType) { + return {children}; } diff --git a/src/frontend/src/components/chatComponent/chatTrigger/index.tsx b/src/frontend/src/components/chatComponent/chatTrigger/index.tsx new file mode 100644 index 000000000..9d5f67e0c --- /dev/null +++ b/src/frontend/src/components/chatComponent/chatTrigger/index.tsx @@ -0,0 +1,37 @@ +import { Transition } from "@headlessui/react"; +import { Bars3CenterLeftIcon, ChatBubbleBottomCenterTextIcon } from "@heroicons/react/24/outline"; +import { nodeColors } from "../../../utils"; +import { PopUpContext } from "../../../contexts/popUpContext"; +import { useContext } from "react"; +import ChatModal from "../../../modals/chatModal"; + +export default function ChatTrigger({open, setOpen,flow}){ + const {openPopUp} = useContext(PopUpContext) + return( +
+
+ +
+
+
) +} \ No newline at end of file diff --git a/src/frontend/src/components/chatComponent/index.tsx b/src/frontend/src/components/chatComponent/index.tsx index 1febb7539..668a970bd 100644 --- a/src/frontend/src/components/chatComponent/index.tsx +++ b/src/frontend/src/components/chatComponent/index.tsx @@ -1,290 +1,32 @@ -import { Transition } from "@headlessui/react"; import { - Bars3CenterLeftIcon, - LockClosedIcon, - PaperAirplaneIcon, - XMarkIcon, -} from "@heroicons/react/24/outline"; -import { - MouseEventHandler, - useContext, useEffect, useRef, useState, } from "react"; -import { sendAll } from "../../controllers/API"; -import { alertContext } from "../../contexts/alertContext"; -import { classNames, nodeColors, snakeToNormalCase } from "../../utils"; -import { TabsContext } from "../../contexts/tabsContext"; -import { ChatType } from "../../types/chat"; -import ChatMessage from "./chatMessage"; -import { NodeType } from "../../types/flow"; + +import { ChatMessageType, ChatType } from "../../types/chat"; +import ChatTrigger from "./chatTrigger"; +import ChatModal from "../../modals/chatModal"; const _ = require("lodash"); -export default function Chat({ flow, reactFlowInstance }: ChatType) { - const { updateFlow, lockChat, setLockChat, flows, tabIndex } = - useContext(TabsContext); - const [saveChat, setSaveChat] = useState(false); - const [open, setOpen] = useState(true); - const [chatValue, setChatValue] = useState(""); - const [chatHistory, setChatHistory] = useState(flow.chat); - const { setErrorData, setNoticeData } = useContext(alertContext); - const addChatHistory = ( - message: string, - isSend: boolean, - thought?: string - ) => { - let tabsChange = false; - setChatHistory((old) => { - let newChat = _.cloneDeep(old); - if (JSON.stringify(flow.chat) !== JSON.stringify(old)) { - tabsChange = true; - return old; - } - if (thought) { - newChat.push({ message, isSend, thought }); - } else { - newChat.push({ message, isSend }); - } - return newChat; - }); 
- if (tabsChange) { - if (thought) { - updateFlow({ - ..._.cloneDeep(flow), - chat: [...flow.chat, { isSend, message, thought }], - }); - } else { - updateFlow({ - ..._.cloneDeep(flow), - chat: [...flow.chat, { isSend, message }], - }); - } - } - setSaveChat((chat) => !chat); - }; +export default function Chat({ flow }: ChatType) { + const [open, setOpen] = useState(false); useEffect(() => { - updateFlow({ ..._.cloneDeep(flow), chat: chatHistory }); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [saveChat]); - useEffect(() => { - setChatHistory(flow.chat); - }, [flow]); - useEffect(() => { - if (ref.current) ref.current.scrollIntoView({ behavior: "smooth" }); - }, [chatHistory]); - - function validateNode(n: NodeType): Array { - if (!n.data?.node?.template || !Object.keys(n.data.node.template)) { - setNoticeData({ - title: - "We've noticed a potential issue with a node in the flow. Please review it and, if necessary, submit a bug report with your exported flow file. Thank you for your help!", - }); - return []; - } - - const { - type, - node: { template }, - } = n.data; - - return Object.keys(template).reduce( - (errors: Array, t) => - errors.concat( - (template[t].required && template[t].show) && - (!template[t].value && template[t].value !== false && template[t].value === "") && - !reactFlowInstance - .getEdges() - .some( - (e) => - e.targetHandle.split("|")[1] === t && - e.targetHandle.split("|")[2] === n.id - ) - ? [ - `${type} is missing ${template.display_name - ? 
template.display_name - : snakeToNormalCase(template[t].name) - }.`, - ] - : [] - ), - [] as string[] - ); - } - - function validateNodes() { - return reactFlowInstance - .getNodes() - .flatMap((n: NodeType) => validateNode(n)); - } - - const ref = useRef(null); - - function sendMessage() { - if (chatValue !== "") { - let nodeValidationErrors = validateNodes(); - if (nodeValidationErrors.length === 0) { - setLockChat(true); - let message = chatValue; - setChatValue(""); - addChatHistory(message, true); - - sendAll({ - ...reactFlowInstance.toObject(), - message, - chatHistory, - name: flow.name, - description: flow.description, - }) - .then((r) => { - addChatHistory(r.data.result, false, r.data.thought); - setLockChat(false); - }) - .catch((error) => { - setErrorData({ - title: error.message ?? "Unknown Error", - list: [error.response.data.detail], - }); - setLockChat(false); - let lastMessage; - setChatHistory((chatHistory) => { - let newChat = chatHistory; - - lastMessage = newChat.pop().message; - return newChat; - }); - setChatValue(lastMessage); - }); - } else { - setErrorData({ - title: "Oops! Looks like you missed some required information:", - list: nodeValidationErrors, - }); - } - } else { - setErrorData({ - title: "Error sending message", - list: ["The message cannot be empty."], - }); - } - } - function clearChat() { - setChatHistory([]); - updateFlow({ ..._.cloneDeep(flow), chat: [] }); - } - + const handleKeyDown = (event: KeyboardEvent) => { + if (event.key === "K" && event.shiftKey && (event.metaKey||event.ctrlKey)) { + setOpen(oldState=>!oldState); + } + }; + document.addEventListener("keydown", handleKeyDown); + return () => { + document.removeEventListener("keydown", handleKeyDown); + }; + }, []); return ( <> - -
-
-
{ - setOpen(false); - }} - className="flex justify-between cursor-pointer items-center px-5 py-2 border-b dark:border-b-gray-700" - > -
- - Chat -
- -
-
- {chatHistory.map((c, i) => ( - - ))} -
-
-
-
- { - if (event.key === "Enter" && !lockChat) { - sendMessage(); - } - }} - type="text" - disabled={lockChat} - value={lockChat ? "Thinking..." : chatValue} - onChange={(e) => { - setChatValue(e.target.value); - }} - className={classNames( - lockChat ? "bg-gray-500 text-white" : "dark:bg-gray-700", - "form-input block w-full rounded-md border-gray-300 dark:border-gray-600 dark:text-white pr-10 sm:text-sm" - )} - placeholder={"Send a message..."} - /> -
- -
-
-
-
-
-
- -
-
- -
-
-
+ + ); } diff --git a/src/frontend/src/components/loadingComponent/index.tsx b/src/frontend/src/components/loadingComponent/index.tsx index 668c1c5a8..6181fd7cc 100644 --- a/src/frontend/src/components/loadingComponent/index.tsx +++ b/src/frontend/src/components/loadingComponent/index.tsx @@ -10,6 +10,7 @@ export default function LoadingComponent({remSize}:LoadingComponentProps){ +

Loading...
) diff --git a/src/frontend/src/contexts/index.tsx b/src/frontend/src/contexts/index.tsx index 310606ea5..06c576b83 100644 --- a/src/frontend/src/contexts/index.tsx +++ b/src/frontend/src/contexts/index.tsx @@ -13,12 +13,10 @@ export default function ContextWrapper({ children }: { children: ReactNode }) { - - + - {children} + {children} - diff --git a/src/frontend/src/contexts/tabsContext.tsx b/src/frontend/src/contexts/tabsContext.tsx index a11f0339d..288a4fea2 100644 --- a/src/frontend/src/contexts/tabsContext.tsx +++ b/src/frontend/src/contexts/tabsContext.tsx @@ -1,11 +1,18 @@ -import { createContext, useEffect, useState, useRef, ReactNode, useContext } from "react"; +import { + createContext, + useEffect, + useState, + useRef, + ReactNode, + useContext, +} from "react"; import { FlowType } from "../types/flow"; import { TabsContextType } from "../types/tabs"; import { normalCaseToSnakeCase } from "../utils"; import { alertContext } from "./alertContext"; const TabsContextInitialValue: TabsContextType = { - save:()=>{}, + save: () => {}, tabIndex: 0, setTabIndex: (index: number) => {}, flows: [], @@ -13,11 +20,9 @@ const TabsContextInitialValue: TabsContextType = { addFlow: (flowData?: any) => {}, updateFlow: (newFlow: FlowType) => {}, incrementNodeId: () => 0, - downloadFlow: (flow:FlowType) => {}, + downloadFlow: (flow: FlowType) => {}, uploadFlow: () => {}, - lockChat: false, - setLockChat:(prevState:boolean)=>{}, - hardReset:()=>{}, + hardReset: () => {}, }; export const TabsContext = createContext( @@ -25,7 +30,7 @@ export const TabsContext = createContext( ); export function TabsProvider({ children }: { children: ReactNode }) { - const {setNoticeData} = useContext(alertContext) + const { setNoticeData } = useContext(alertContext); const [tabIndex, setTabIndex] = useState(0); const [flows, setFlows] = useState>([]); const [id, setId] = useState(0); @@ -36,20 +41,18 @@ export function TabsProvider({ children }: { children: ReactNode }) { newNodeId.current = 
newNodeId.current + 1; return newNodeId.current; } - function save(){ + function save() { if (flows.length !== 0) - window.localStorage.setItem( - "tabsData", - JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current }) - ); + window.localStorage.setItem( + "tabsData", + JSON.stringify({ tabIndex, flows, id, nodeId: newNodeId.current }) + ); } useEffect(() => { //save tabs locally - save() + save(); }, [flows, id, tabIndex, newNodeId]); - - useEffect(() => { //get tabs locally saved let cookie = window.localStorage.getItem("tabsData"); @@ -61,15 +64,17 @@ export function TabsProvider({ children }: { children: ReactNode }) { newNodeId.current = cookieObject.nodeId; } }, []); - function hardReset(){ - newNodeId.current=0; - setTabIndex(0);setFlows([]);setId(0); + function hardReset() { + newNodeId.current = 0; + setTabIndex(0); + setFlows([]); + setId(0); } /** * Downloads the current flow as a JSON file */ - function downloadFlow(flow:FlowType) { + function downloadFlow(flow: FlowType) { // create a data URI with the current flow data const jsonString = `data:text/json;chatset=utf-8,${encodeURIComponent( JSON.stringify(flow) @@ -82,7 +87,9 @@ export function TabsProvider({ children }: { children: ReactNode }) { // simulate a click on the link element to trigger the download link.click(); - setNoticeData({title:"Warning: Critical data,JSON file may including API keys."}) + setNoticeData({ + title: "Warning: Critical data,JSON file may including API keys.", + }); } /** @@ -139,15 +146,14 @@ export function TabsProvider({ children }: { children: ReactNode }) { function addFlow(flow?: FlowType) { // Get data from the flow or set it to null if there's no flow provided. const data = flow?.data ? flow.data : null; - const description = flow?.description?flow.description:"" + const description = flow?.description ? flow.description : ""; // Create a new flow with a default name if no flow is provided. 
let newFlow: FlowType = { description, - name: "New Flow", + name: flow?.name ?? "New Flow", id: id.toString(), data, - chat: flow ? flow.chat : [], }; // Increment the ID counter. @@ -171,10 +177,9 @@ export function TabsProvider({ children }: { children: ReactNode }) { const newFlows = [...prevState]; const index = newFlows.findIndex((flow) => flow.id === newFlow.id); if (index !== -1) { - newFlows[index].description = newFlow.description??"" + newFlows[index].description = newFlow.description ?? ""; newFlows[index].data = newFlow.data; newFlows[index].name = newFlow.name; - newFlows[index].chat = newFlow.chat; } return newFlows; }); @@ -185,8 +190,6 @@ export function TabsProvider({ children }: { children: ReactNode }) { value={{ save, hardReset, - lockChat, - setLockChat, tabIndex, setTabIndex, flows, diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index 8fa7ff527..490ec2837 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -1,6 +1,7 @@ import { PromptTypeAPI, errorsTypeAPI } from './../../types/api/index'; import { APIObjectType, sendAllProps } from '../../types/api/index'; import axios, { AxiosResponse } from "axios"; +import { FlowType } from '../../types/flow'; export async function getAll():Promise> { return await axios.get(`/all`); @@ -18,4 +19,22 @@ export async function checkCode(code:string):Promise>{ return await axios.post('/validate/prompt',{template}) -} \ No newline at end of file +} + +export async function getExamples(): Promise { + const url = 'https://api.github.com/repos/logspace-ai/langflow_examples/contents/examples'; + const response = await axios.get(url); + + const jsonFiles = response.data.filter((file: any) => { + return file.name.endsWith('.json'); + }); + + const contentsPromises = jsonFiles.map(async (file: any) => { + const contentResponse = await axios.get(file.download_url); + return contentResponse.data; + }); + + const 
contents = await Promise.all(contentsPromises); + + return contents; + } \ No newline at end of file diff --git a/src/frontend/src/modals/chatModal/chatInput/index.tsx b/src/frontend/src/modals/chatModal/chatInput/index.tsx new file mode 100644 index 000000000..7b371edb1 --- /dev/null +++ b/src/frontend/src/modals/chatModal/chatInput/index.tsx @@ -0,0 +1,57 @@ +import { LockClosedIcon, PaperAirplaneIcon } from "@heroicons/react/24/outline"; +import { classNames } from "../../../utils"; +import { useRef } from "react"; + +export default function ChatInput({ + lockChat, + chatValue, + sendMessage, + setChatValue, +}: { + lockChat:boolean; + chatValue:string; + sendMessage:Function; + setChatValue:Function; +}) { + const inputRef = useRef(null); + return ( + <> +