diff --git a/.github/stale.yml b/.github/stale.yml
new file mode 100644
index 000000000..a28a07086
--- /dev/null
+++ b/.github/stale.yml
@@ -0,0 +1,17 @@
+# Number of days of inactivity before an issue becomes stale
+daysUntilStale: 45
+# Number of days of inactivity before a stale issue is closed
+daysUntilClose: 7
+# Issues with these labels will never be considered stale
+exemptLabels:
+ - pinned
+ - security
+# Label to use when marking an issue as stale
+staleLabel: stale
+# Comment to post when marking an issue as stale. Set to `false` to disable
+markComment: >
+ This issue has been automatically marked as stale because it has not had
+ recent activity. It will be closed if no further activity occurs. Thank you
+ for your contributions.
+# Comment to post when closing a stale issue. Set to `false` to disable
+closeComment: false
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 1b713c3a0..000000000
--- a/Dockerfile
+++ /dev/null
@@ -1,14 +0,0 @@
-FROM python:3.10-slim
-
-RUN apt-get update && apt-get install gcc g++ git make -y
-RUN useradd -m -u 1000 user
-USER user
-ENV HOME=/home/user \
- PATH=/home/user/.local/bin:$PATH
-
-WORKDIR $HOME/app
-
-COPY --chown=user . $HOME/app
-
-RUN pip install langflow>==0.0.71 -U --user
-CMD ["langflow", "--host", "0.0.0.0", "--port", "7860"]
diff --git a/GCP_DEPLOYMENT.md b/GCP_DEPLOYMENT.md
index edb7e043c..36c81e19f 100644
--- a/GCP_DEPLOYMENT.md
+++ b/GCP_DEPLOYMENT.md
@@ -6,14 +6,14 @@ This guide will help you set up a Langflow development VM in a Google Cloud Plat
## Standard VM
-[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment.
## Spot/Preemptible Instance
-[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts.
diff --git a/README.md b/README.md
index 9b0a0cb3a..de98f87a1 100644
--- a/README.md
+++ b/README.md
@@ -37,6 +37,15 @@ or
langflow
```
+### Deploy Langflow on Google Cloud Platform
+
+Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.
+
+Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project.
+
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
+
+
### Deploy Langflow on Google Cloud Platform
Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.
diff --git a/docker-compose.yml b/docker-compose.yml
index d9ba84030..755d0794d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -17,6 +17,8 @@ services:
dockerfile: ./dev.Dockerfile
args:
- BACKEND_URL=http://backend:7860
+ environment:
+ - VITE_PROXY_TARGET=http://backend:7860
ports:
- "3000:3000"
volumes:
diff --git a/docker_example/Dockerfile b/docker_example/Dockerfile
index d05a6810b..1b713c3a0 100644
--- a/docker_example/Dockerfile
+++ b/docker_example/Dockerfile
@@ -1,7 +1,14 @@
-FROM python:3.11-slim
+FROM python:3.10-slim
-RUN apt-get update && apt-get install gcc -y
-RUN pip install langflow>=0.0.33
+RUN apt-get update && apt-get install gcc g++ git make -y
+RUN useradd -m -u 1000 user
+USER user
+ENV HOME=/home/user \
+ PATH=/home/user/.local/bin:$PATH
-EXPOSE 7860
-CMD ["langflow", "--host", "0.0.0.0"]
\ No newline at end of file
+WORKDIR $HOME/app
+
+COPY --chown=user . $HOME/app
+
+RUN pip install "langflow>=0.0.71" -U --user
+CMD ["langflow", "--host", "0.0.0.0", "--port", "7860"]
diff --git a/poetry.lock b/poetry.lock
index 7aff75b41..b9f7fcb52 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -511,14 +511,14 @@ files = [
[[package]]
name = "chromadb"
-version = "0.3.22"
+version = "0.3.23"
description = "Chroma."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "chromadb-0.3.22-py3-none-any.whl", hash = "sha256:54b58e562ab8a63194ce3b453633ce351475193de2184845f0577db969f1cf49"},
- {file = "chromadb-0.3.22.tar.gz", hash = "sha256:41acb262c2c7bb41afecd50737f440dce3fdaa3d3fe1749d0e4be1ffc8699e63"},
+ {file = "chromadb-0.3.23-py3-none-any.whl", hash = "sha256:c1e04fddff0916243895bedeffc1977745328f62404d70981eb1a0cb9dcdfaf3"},
+ {file = "chromadb-0.3.23.tar.gz", hash = "sha256:87fa922c92e2e90fb48234b435e9d4f0c61646fbd1526062f53f63326fc21228"},
]
[package.dependencies]
@@ -1180,20 +1180,19 @@ grpc = ["grpcio (>=1.44.0,<2.0.0dev)"]
[[package]]
name = "gptcache"
-version = "0.1.23"
+version = "0.1.24"
description = "GPTCache, a powerful caching library that can be used to speed up and lower the cost of chat applications that rely on the LLM service. GPTCache works as a memcache for AIGC applications, similar to how Redis works for traditional applications."
category = "main"
optional = false
python-versions = ">=3.8.1"
files = [
- {file = "gptcache-0.1.23-py3-none-any.whl", hash = "sha256:8bcd366e1dd5de432e113831afdea97493f090372a752a42b9ff16cb8c818635"},
- {file = "gptcache-0.1.23.tar.gz", hash = "sha256:5b5e3ef6f5df35f948bd203d1e33f3985459e60be436547529ff8b31f245238d"},
+ {file = "gptcache-0.1.24-py3-none-any.whl", hash = "sha256:070aad4867ab915a7b5db3a886e9f0289e52d1cb92a407c984b0241298079750"},
+ {file = "gptcache-0.1.24.tar.gz", hash = "sha256:aa591cb00898d457a50a5e0cd137d0119e86819c110ce6c7bce2adafeae0a467"},
]
[package.dependencies]
cachetools = "*"
numpy = "*"
-openai = "*"
requests = "*"
[[package]]
@@ -1528,14 +1527,14 @@ files = [
[[package]]
name = "ipykernel"
-version = "6.23.0"
+version = "6.23.1"
description = "IPython Kernel for Jupyter"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
- {file = "ipykernel-6.23.0-py3-none-any.whl", hash = "sha256:fc886f1dcdc0ec17f277e4d21fd071c857d381adcb04f3f3735d25325ca323c6"},
- {file = "ipykernel-6.23.0.tar.gz", hash = "sha256:bd6f487d9e2744c84f6e667d46462d7647a4c862e70e08282f05a52b9d4b705f"},
+ {file = "ipykernel-6.23.1-py3-none-any.whl", hash = "sha256:77aeffab056c21d16f1edccdc9e5ccbf7d96eb401bd6703610a21be8b068aadc"},
+ {file = "ipykernel-6.23.1.tar.gz", hash = "sha256:1aba0ae8453e15e9bc6b24e497ef6840114afcdb832ae597f32137fa19d42a6f"},
]
[package.dependencies]
@@ -1697,14 +1696,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "langchain"
-version = "0.0.166"
+version = "0.0.170"
description = "Building applications with LLMs through composability"
category = "main"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
- {file = "langchain-0.0.166-py3-none-any.whl", hash = "sha256:32417cc38ba211d46c3e97f29cb8124175fe46047bda14a4c634351b005acd21"},
- {file = "langchain-0.0.166.tar.gz", hash = "sha256:fb1e90eb0aeef9c574e6683586bfbfed1974e187dd8261b571cb33888c35a92e"},
+ {file = "langchain-0.0.170-py3-none-any.whl", hash = "sha256:3543c14c08d39c0eef2d1a88a98161a25329720660811c546c8881d91c272c77"},
+ {file = "langchain-0.0.170.tar.gz", hash = "sha256:799e047857b0b12606255e4e843c7eb3724ddb85242c97dccd49b007e40486bf"},
]
[package.dependencies]
@@ -1719,29 +1718,28 @@ PyYAML = ">=5.4.1"
requests = ">=2,<3"
SQLAlchemy = ">=1.4,<3"
tenacity = ">=8.1.0,<9.0.0"
-tqdm = ">=4.48.0"
[package.extras]
-all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.3.0,<4.0.0)", "docarray (>=0.31.0,<0.32.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "hnswlib (>=0.7.0,<0.8.0)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "lark (>=1.1.5,<2.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "protobuf (==3.19)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
+all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.3.0,<4.0.0)", "docarray (>=0.31.0,<0.32.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "hnswlib (>=0.7.0,<0.8.0)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "lark (>=1.1.5,<2.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "protobuf (==3.19)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
azure = ["azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "openai (>=0,<1)"]
cohere = ["cohere (>=3,<4)"]
embeddings = ["sentence-transformers (>=2,<3)"]
-extended-testing = ["pdfminer-six (>=20221105,<20221106)", "pypdf (>=3.4.0,<4.0.0)"]
+extended-testing = ["jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "pdfminer-six (>=20221105,<20221106)", "pypdf (>=3.4.0,<4.0.0)", "tqdm (>=4.48.0)"]
hnswlib = ["docarray (>=0.31.0,<0.32.0)", "hnswlib (>=0.7.0,<0.8.0)", "protobuf (==3.19)"]
in-memory-store = ["docarray (>=0.31.0,<0.32.0)"]
llms = ["anthropic (>=0.2.6,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<3)", "transformers (>=4,<5)"]
-openai = ["openai (>=0,<1)"]
+openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"]
qdrant = ["qdrant-client (>=1.1.2,<2.0.0)"]
[[package]]
name = "llama-cpp-python"
-version = "0.1.23"
+version = "0.1.50"
description = "A Python wrapper for llama.cpp"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "llama_cpp_python-0.1.23.tar.gz", hash = "sha256:323a937e68e04251b5ad1804922e05d15c8b6bfbcf7c3e683a7b39a20e165ebf"},
+ {file = "llama_cpp_python-0.1.50.tar.gz", hash = "sha256:e305ae1b9f135f94afd8dd227701e6a1cd36db9c28f736b830ec364127c00bb9"},
]
[package.dependencies]
@@ -4840,4 +4838,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
-content-hash = "d914f734f4ff1bcbe8e678d46b20b73c3565b3af6f5dd0ac0359fae800c6bf2e"
+content-hash = "0524829f482c4eab8c25205d68dcec64084a28823de82cd3c3737e6bb74439c7"
diff --git a/pyproject.toml b/pyproject.toml
index d9a6dffcd..4c26bb6ec 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,7 +29,7 @@ google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.7.0"
gunicorn = "^20.1.0"
-langchain = "^0.0.166"
+langchain = "^0.0.170"
openai = "^0.27.2"
types-pyyaml = "^6.0.12.8"
dill = "^0.3.6"
@@ -37,7 +37,7 @@ pandas = "^1.5.3"
chromadb = "^0.3.21"
huggingface-hub = "^0.13.3"
rich = "^13.3.3"
-llama-cpp-python = "0.1.23"
+llama-cpp-python = "0.1.50"
networkx = "^3.1"
unstructured = "^0.5.11"
pypdf = "^3.7.1"
diff --git a/scripts/deploy_langflow_gcp.sh b/scripts/deploy_langflow_gcp.sh
index 2c3dc0420..fbf87099a 100644
--- a/scripts/deploy_langflow_gcp.sh
+++ b/scripts/deploy_langflow_gcp.sh
@@ -26,16 +26,16 @@ if [[ -z "$subnet_exists" ]]; then
gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE
fi
-# Create a firewall rule to allow TCP port 8080 for all instances in the VPC
-firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)")
-if [[ -z "$firewall_8080_exists" ]]; then
- gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS
+# Create a firewall rule to allow TCP port 7860 for all instances in the VPC
+firewall_7860_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-7860" --format="value(name)")
+if [[ -z "$firewall_7860_exists" ]]; then
+ gcloud compute firewall-rules create allow-tcp-7860 --network $VPC_NAME --allow tcp:7860 --source-ranges 0.0.0.0/0 --direction INGRESS
fi
# Create a firewall rule to allow IAP traffic
firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)")
if [[ -z "$firewall_iap_exists" ]]; then
- gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS
+ gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443,tcp:22,tcp:3389 --source-ranges 35.235.240.0/20 --direction INGRESS
fi
# Define the startup script as a multiline Bash here-doc
@@ -49,24 +49,7 @@ apt -y upgrade
# Install Python 3 pip, Langflow, and Nginx
apt -y install python3-pip
pip install langflow
-apt-get -y install nginx
-
-# Configure Nginx for Langflow
-touch /etc/nginx/sites-available/langflow-app
-echo "server {
- listen 0.0.0.0:8080;
-
- location / {
- proxy_pass http://127.0.0.1:7860;
- proxy_set_header Host "\$host";
- proxy_set_header X-Real-IP "\$remote_addr";
- proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for";
- }
-}" >> /etc/nginx/sites-available/langflow-app
-ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/
-sudo nginx -t
-sudo systemctl restart nginx
-langflow
+langflow --host 0.0.0.0 --port 7860
EOF
)
diff --git a/scripts/deploy_langflow_gcp_spot.sh b/scripts/deploy_langflow_gcp_spot.sh
index 065b6013f..9291ddcc3 100644
--- a/scripts/deploy_langflow_gcp_spot.sh
+++ b/scripts/deploy_langflow_gcp_spot.sh
@@ -26,16 +26,16 @@ if [[ -z "$subnet_exists" ]]; then
gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE
fi
-# Create a firewall rule to allow TCP port 8080 for all instances in the VPC
-firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)")
-if [[ -z "$firewall_8080_exists" ]]; then
- gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS
+# Create a firewall rule to allow TCP port 7860 for all instances in the VPC
+firewall_7860_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-7860" --format="value(name)")
+if [[ -z "$firewall_7860_exists" ]]; then
+ gcloud compute firewall-rules create allow-tcp-7860 --network $VPC_NAME --allow tcp:7860 --source-ranges 0.0.0.0/0 --direction INGRESS
fi
# Create a firewall rule to allow IAP traffic
firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)")
if [[ -z "$firewall_iap_exists" ]]; then
- gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS
+ gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443,tcp:22,tcp:3389 --source-ranges 35.235.240.0/20 --direction INGRESS
fi
# Define the startup script as a multiline Bash here-doc
@@ -49,24 +49,7 @@ apt -y upgrade
# Install Python 3 pip, Langflow, and Nginx
apt -y install python3-pip
pip install langflow
-apt-get -y install nginx
-
-# Configure Nginx for Langflow
-touch /etc/nginx/sites-available/langflow-app
-echo "server {
- listen 0.0.0.0:8080;
-
- location / {
- proxy_pass http://127.0.0.1:7860;
- proxy_set_header Host "\$host";
- proxy_set_header X-Real-IP "\$remote_addr";
- proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for";
- }
-}" >> /etc/nginx/sites-available/langflow-app
-ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/
-sudo nginx -t
-sudo systemctl restart nginx
-langflow
+langflow --host 0.0.0.0 --port 7860
EOF
)
@@ -84,7 +67,7 @@ gcloud compute instances create $VM_NAME \
--zone $ZONE \
--network $VPC_NAME \
--subnet $SUBNET_NAME \
- -preemptible
+ --preemptible
# Remove the temporary file after the VM is created
rm $tempfile
diff --git a/scripts/walkthroughtutorial.md b/scripts/walkthroughtutorial.md
index fa6e3c11d..83ea3086a 100644
--- a/scripts/walkthroughtutorial.md
+++ b/scripts/walkthroughtutorial.md
@@ -37,7 +37,7 @@ The script will:
1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed
2. Create a startup script to install Python, Langflow, and Nginx
3. Create a Compute Engine VM instance with the specified configuration and startup script
-4. Configure Nginx to serve Langflow on TCP port 8080
+4. Run Langflow to serve content on TCP port 7860
> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished.
@@ -47,13 +47,13 @@ In the next step, you'll learn how to connect to the Langflow VM.
## Connect to the Langflow VM
To connect to your new Langflow VM, follow these steps:
-1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080
+1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 7860
**or**
3. Run the following command to display the URL for your Langflow environment:
```bash
export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)")
-echo http://$LANGFLOW_IP:8080
+echo http://$LANGFLOW_IP:7860
```
4. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment
@@ -76,7 +76,7 @@ However, if you decide to remove them after completing the walkthrough, you can
> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings.
```
-gcloud compute firewall-rules delete allow-tcp-8080 --quiet
+gcloud compute firewall-rules delete allow-tcp-7860 --quiet
gcloud compute firewall-rules delete allow-iap --quiet
diff --git a/scripts/walkthroughtutorial_spot.md b/scripts/walkthroughtutorial_spot.md
index 751f03d78..3792bc1ca 100644
--- a/scripts/walkthroughtutorial_spot.md
+++ b/scripts/walkthroughtutorial_spot.md
@@ -29,7 +29,7 @@ Run the deploy_langflow_gcp_spot.sh script to configure the GCP environment and
```sh
gcloud config set project
-bash ./deploy_langflow_gcp.sh
+bash ./deploy_langflow_gcp_spot.sh
```
The script will:
@@ -37,7 +37,7 @@ The script will:
1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed
2. Create a startup script to install Python, Langflow, and Nginx
3. Create a Compute Engine VM instance with the specified configuration and startup script
-4. Configure Nginx to serve Langflow on TCP port 8080
+4. Run Langflow to serve content on TCP port 7860
> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished.
@@ -46,13 +46,13 @@ In the next step, you'll learn how to connect to the Langflow VM.
## Connect to the Langflow VM
To connect to your new Langflow VM, follow these steps:
-1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080
+1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 7860
**or**
3. Run the following command to display the URL for your Langflow environment:
```bash
export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)")
-echo http://$LANGFLOW_IP:8080
+echo http://$LANGFLOW_IP:7860
```
4. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment
@@ -73,7 +73,7 @@ However, if you decide to remove them after completing the walkthrough, you can
> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings.
```
-gcloud compute firewall-rules delete allow-tcp-8080 --quiet
+gcloud compute firewall-rules delete allow-tcp-7860 --quiet
gcloud compute firewall-rules delete allow-iap --quiet
diff --git a/src/backend/langflow/api/endpoints.py b/src/backend/langflow/api/endpoints.py
index eae0e9f60..02a775630 100644
--- a/src/backend/langflow/api/endpoints.py
+++ b/src/backend/langflow/api/endpoints.py
@@ -1,3 +1,4 @@
+from importlib.metadata import version
import logging
from fastapi import APIRouter, HTTPException
@@ -33,3 +34,14 @@ async def get_load(predict_request: PredictRequest):
# Log stack trace
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
+
+
+# get endpoint to return version of langflow
+@router.get("/version")
+def get_version():
+ return {"version": version("langflow")}
+
+
+@router.get("/health")
+def get_health():
+ return {"status": "OK"}
diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/langflow/interface/agents/custom.py
index d85ba8a56..0cbf7baca 100644
--- a/src/backend/langflow/interface/agents/custom.py
+++ b/src/backend/langflow/interface/agents/custom.py
@@ -17,7 +17,9 @@ from langchain.agents.agent_toolkits import (
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX
-from langchain.agents.agent_toolkits.pandas.prompt import SUFFIX as PANDAS_SUFFIX
+from langchain.agents.agent_toolkits.pandas.prompt import (
+ SUFFIX_WITH_DF as PANDAS_SUFFIX,
+)
from langchain.agents.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX
from langchain.agents.agent_toolkits.vectorstore.prompt import (
PREFIX as VECTORSTORE_PREFIX,
diff --git a/src/backend/langflow/interface/loading.py b/src/backend/langflow/interface/loading.py
index 148efd5b7..6cd9246b8 100644
--- a/src/backend/langflow/interface/loading.py
+++ b/src/backend/langflow/interface/loading.py
@@ -54,7 +54,7 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
if base_type == "agents":
return instantiate_agent(class_object, params)
elif base_type == "prompts":
- return instantiate_prompt(class_object, node_type, params)
+ return instantiate_prompt(node_type, class_object, params)
elif base_type == "tools":
return instantiate_tool(node_type, class_object, params)
elif base_type == "toolkits":
@@ -77,7 +77,7 @@ def instantiate_agent(class_object, params):
return load_agent_executor(class_object, params)
-def instantiate_prompt(class_object, node_type, params):
+def instantiate_prompt(node_type, class_object, params):
if node_type == "ZeroShotPrompt":
if "tools" not in params:
params["tools"] = []
@@ -96,7 +96,7 @@ def instantiate_tool(node_type, class_object, params):
raise ValueError("Function should be a string")
elif node_type.lower() == "tool":
return class_object(**params)
- return None # Or some other default action
+ return class_object(**params)
def instantiate_toolkit(node_type, class_object, params):
diff --git a/src/frontend/dev.Dockerfile b/src/frontend/dev.Dockerfile
index 4773fc2b9..8678b02dd 100644
--- a/src/frontend/dev.Dockerfile
+++ b/src/frontend/dev.Dockerfile
@@ -23,4 +23,4 @@ RUN chmod +x set_proxy.sh && \
USER node
RUN npm install --loglevel warn
-CMD ["npm", "start"]
\ No newline at end of file
+CMD ["npm", "run", "dev:docker"]
\ No newline at end of file
diff --git a/src/frontend/public/index.html b/src/frontend/index.html
similarity index 69%
rename from src/frontend/public/index.html
rename to src/frontend/index.html
index b2e9c4b82..3b6e30308 100644
--- a/src/frontend/public/index.html
+++ b/src/frontend/index.html
@@ -4,12 +4,12 @@
-
+
LangFlow
-