Hotfixes: Dark Mode classes and Text Wrapper (#309)

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-05-17 11:36:09 -03:00 committed by GitHub
commit 830a2e3119
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
42 changed files with 1608 additions and 13976 deletions

17
.github/stale.yml vendored Normal file
View file

@ -0,0 +1,17 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 45
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- security
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

View file

@ -1,14 +0,0 @@
FROM python:3.10-slim
RUN apt-get update && apt-get install gcc g++ git make -y
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
PATH=/home/user/.local/bin:$PATH
WORKDIR $HOME/app
COPY --chown=user . $HOME/app
RUN pip install "langflow>=0.0.71" -U --user
CMD ["langflow", "--host", "0.0.0.0", "--port", "7860"]

View file

@ -6,14 +6,14 @@ This guide will help you set up a Langflow development VM in a Google Cloud Plat
## Standard VM
[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment.
<hr>
## Spot/Preemptible Instance
[![Open in Cloud Shell - Spot Instance](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
[![Open in Cloud Shell - Spot Instance](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts.

View file

@ -37,6 +37,15 @@ or
langflow
```
### Deploy Langflow on Google Cloud Platform
Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.
Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project.
[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
### Deploy Langflow on Google Cloud Platform
Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.

View file

@ -17,6 +17,8 @@ services:
dockerfile: ./dev.Dockerfile
args:
- BACKEND_URL=http://backend:7860
environment:
- VITE_PROXY_TARGET=http://backend:7860
ports:
- "3000:3000"
volumes:

View file

@ -1,7 +1,14 @@
FROM python:3.11-slim
FROM python:3.10-slim
RUN apt-get update && apt-get install gcc -y
RUN pip install langflow>=0.0.33
RUN apt-get update && apt-get install gcc g++ git make -y
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
PATH=/home/user/.local/bin:$PATH
EXPOSE 7860
CMD ["langflow", "--host", "0.0.0.0"]
WORKDIR $HOME/app
COPY --chown=user . $HOME/app
RUN pip install "langflow>=0.0.71" -U --user
CMD ["langflow", "--host", "0.0.0.0", "--port", "7860"]

38
poetry.lock generated
View file

@ -511,14 +511,14 @@ files = [
[[package]]
name = "chromadb"
version = "0.3.22"
version = "0.3.23"
description = "Chroma."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "chromadb-0.3.22-py3-none-any.whl", hash = "sha256:54b58e562ab8a63194ce3b453633ce351475193de2184845f0577db969f1cf49"},
{file = "chromadb-0.3.22.tar.gz", hash = "sha256:41acb262c2c7bb41afecd50737f440dce3fdaa3d3fe1749d0e4be1ffc8699e63"},
{file = "chromadb-0.3.23-py3-none-any.whl", hash = "sha256:c1e04fddff0916243895bedeffc1977745328f62404d70981eb1a0cb9dcdfaf3"},
{file = "chromadb-0.3.23.tar.gz", hash = "sha256:87fa922c92e2e90fb48234b435e9d4f0c61646fbd1526062f53f63326fc21228"},
]
[package.dependencies]
@ -1180,20 +1180,19 @@ grpc = ["grpcio (>=1.44.0,<2.0.0dev)"]
[[package]]
name = "gptcache"
version = "0.1.23"
version = "0.1.24"
description = "GPTCache, a powerful caching library that can be used to speed up and lower the cost of chat applications that rely on the LLM service. GPTCache works as a memcache for AIGC applications, similar to how Redis works for traditional applications."
category = "main"
optional = false
python-versions = ">=3.8.1"
files = [
{file = "gptcache-0.1.23-py3-none-any.whl", hash = "sha256:8bcd366e1dd5de432e113831afdea97493f090372a752a42b9ff16cb8c818635"},
{file = "gptcache-0.1.23.tar.gz", hash = "sha256:5b5e3ef6f5df35f948bd203d1e33f3985459e60be436547529ff8b31f245238d"},
{file = "gptcache-0.1.24-py3-none-any.whl", hash = "sha256:070aad4867ab915a7b5db3a886e9f0289e52d1cb92a407c984b0241298079750"},
{file = "gptcache-0.1.24.tar.gz", hash = "sha256:aa591cb00898d457a50a5e0cd137d0119e86819c110ce6c7bce2adafeae0a467"},
]
[package.dependencies]
cachetools = "*"
numpy = "*"
openai = "*"
requests = "*"
[[package]]
@ -1528,14 +1527,14 @@ files = [
[[package]]
name = "ipykernel"
version = "6.23.0"
version = "6.23.1"
description = "IPython Kernel for Jupyter"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "ipykernel-6.23.0-py3-none-any.whl", hash = "sha256:fc886f1dcdc0ec17f277e4d21fd071c857d381adcb04f3f3735d25325ca323c6"},
{file = "ipykernel-6.23.0.tar.gz", hash = "sha256:bd6f487d9e2744c84f6e667d46462d7647a4c862e70e08282f05a52b9d4b705f"},
{file = "ipykernel-6.23.1-py3-none-any.whl", hash = "sha256:77aeffab056c21d16f1edccdc9e5ccbf7d96eb401bd6703610a21be8b068aadc"},
{file = "ipykernel-6.23.1.tar.gz", hash = "sha256:1aba0ae8453e15e9bc6b24e497ef6840114afcdb832ae597f32137fa19d42a6f"},
]
[package.dependencies]
@ -1697,14 +1696,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "langchain"
version = "0.0.166"
version = "0.0.170"
description = "Building applications with LLMs through composability"
category = "main"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchain-0.0.166-py3-none-any.whl", hash = "sha256:32417cc38ba211d46c3e97f29cb8124175fe46047bda14a4c634351b005acd21"},
{file = "langchain-0.0.166.tar.gz", hash = "sha256:fb1e90eb0aeef9c574e6683586bfbfed1974e187dd8261b571cb33888c35a92e"},
{file = "langchain-0.0.170-py3-none-any.whl", hash = "sha256:3543c14c08d39c0eef2d1a88a98161a25329720660811c546c8881d91c272c77"},
{file = "langchain-0.0.170.tar.gz", hash = "sha256:799e047857b0b12606255e4e843c7eb3724ddb85242c97dccd49b007e40486bf"},
]
[package.dependencies]
@ -1719,29 +1718,28 @@ PyYAML = ">=5.4.1"
requests = ">=2,<3"
SQLAlchemy = ">=1.4,<3"
tenacity = ">=8.1.0,<9.0.0"
tqdm = ">=4.48.0"
[package.extras]
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.3.0,<4.0.0)", "docarray (>=0.31.0,<0.32.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "hnswlib (>=0.7.0,<0.8.0)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "lark (>=1.1.5,<2.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "protobuf (==3.19)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.3.0,<4.0.0)", "docarray (>=0.31.0,<0.32.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "hnswlib (>=0.7.0,<0.8.0)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "lark (>=1.1.5,<2.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "protobuf (==3.19)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
azure = ["azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "openai (>=0,<1)"]
cohere = ["cohere (>=3,<4)"]
embeddings = ["sentence-transformers (>=2,<3)"]
extended-testing = ["pdfminer-six (>=20221105,<20221106)", "pypdf (>=3.4.0,<4.0.0)"]
extended-testing = ["jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "pdfminer-six (>=20221105,<20221106)", "pypdf (>=3.4.0,<4.0.0)", "tqdm (>=4.48.0)"]
hnswlib = ["docarray (>=0.31.0,<0.32.0)", "hnswlib (>=0.7.0,<0.8.0)", "protobuf (==3.19)"]
in-memory-store = ["docarray (>=0.31.0,<0.32.0)"]
llms = ["anthropic (>=0.2.6,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<3)", "transformers (>=4,<5)"]
openai = ["openai (>=0,<1)"]
openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"]
qdrant = ["qdrant-client (>=1.1.2,<2.0.0)"]
[[package]]
name = "llama-cpp-python"
version = "0.1.23"
version = "0.1.50"
description = "A Python wrapper for llama.cpp"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "llama_cpp_python-0.1.23.tar.gz", hash = "sha256:323a937e68e04251b5ad1804922e05d15c8b6bfbcf7c3e683a7b39a20e165ebf"},
{file = "llama_cpp_python-0.1.50.tar.gz", hash = "sha256:e305ae1b9f135f94afd8dd227701e6a1cd36db9c28f736b830ec364127c00bb9"},
]
[package.dependencies]
@ -4840,4 +4838,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "d914f734f4ff1bcbe8e678d46b20b73c3565b3af6f5dd0ac0359fae800c6bf2e"
content-hash = "0524829f482c4eab8c25205d68dcec64084a28823de82cd3c3737e6bb74439c7"

View file

@ -29,7 +29,7 @@ google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.7.0"
gunicorn = "^20.1.0"
langchain = "^0.0.166"
langchain = "^0.0.170"
openai = "^0.27.2"
types-pyyaml = "^6.0.12.8"
dill = "^0.3.6"
@ -37,7 +37,7 @@ pandas = "^1.5.3"
chromadb = "^0.3.21"
huggingface-hub = "^0.13.3"
rich = "^13.3.3"
llama-cpp-python = "0.1.23"
llama-cpp-python = "0.1.50"
networkx = "^3.1"
unstructured = "^0.5.11"
pypdf = "^3.7.1"

View file

@ -26,16 +26,16 @@ if [[ -z "$subnet_exists" ]]; then
gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE
fi
# Create a firewall rule to allow TCP port 8080 for all instances in the VPC
firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)")
if [[ -z "$firewall_8080_exists" ]]; then
gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS
# Create a firewall rule to allow TCP port 7860 for all instances in the VPC
firewall_7860_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-7860" --format="value(name)")
if [[ -z "$firewall_7860_exists" ]]; then
gcloud compute firewall-rules create allow-tcp-7860 --network $VPC_NAME --allow tcp:7860 --source-ranges 0.0.0.0/0 --direction INGRESS
fi
# Create a firewall rule to allow IAP traffic
firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)")
if [[ -z "$firewall_iap_exists" ]]; then
gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS
gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443,tcp:22,tcp:3389 --source-ranges 35.235.240.0/20 --direction INGRESS
fi
# Define the startup script as a multiline Bash here-doc
@ -49,24 +49,7 @@ apt -y upgrade
# Install Python 3 pip, Langflow, and Nginx
apt -y install python3-pip
pip install langflow
apt-get -y install nginx
# Configure Nginx for Langflow
touch /etc/nginx/sites-available/langflow-app
echo "server {
listen 0.0.0.0:8080;
location / {
proxy_pass http://127.0.0.1:7860;
proxy_set_header Host "\$host";
proxy_set_header X-Real-IP "\$remote_addr";
proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for";
}
}" >> /etc/nginx/sites-available/langflow-app
ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/
sudo nginx -t
sudo systemctl restart nginx
langflow
langflow --host 0.0.0.0 --port 7860
EOF
)

View file

@ -26,16 +26,16 @@ if [[ -z "$subnet_exists" ]]; then
gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE
fi
# Create a firewall rule to allow TCP port 8080 for all instances in the VPC
firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)")
if [[ -z "$firewall_8080_exists" ]]; then
gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS
# Create a firewall rule to allow TCP port 7860 for all instances in the VPC
firewall_7860_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-7860" --format="value(name)")
if [[ -z "$firewall_7860_exists" ]]; then
gcloud compute firewall-rules create allow-tcp-7860 --network $VPC_NAME --allow tcp:7860 --source-ranges 0.0.0.0/0 --direction INGRESS
fi
# Create a firewall rule to allow IAP traffic
firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)")
if [[ -z "$firewall_iap_exists" ]]; then
gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS
gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443,tcp:22,tcp:3389 --source-ranges 35.235.240.0/20 --direction INGRESS
fi
# Define the startup script as a multiline Bash here-doc
@ -49,24 +49,7 @@ apt -y upgrade
# Install Python 3 pip, Langflow, and Nginx
apt -y install python3-pip
pip install langflow
apt-get -y install nginx
# Configure Nginx for Langflow
touch /etc/nginx/sites-available/langflow-app
echo "server {
listen 0.0.0.0:8080;
location / {
proxy_pass http://127.0.0.1:7860;
proxy_set_header Host "\$host";
proxy_set_header X-Real-IP "\$remote_addr";
proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for";
}
}" >> /etc/nginx/sites-available/langflow-app
ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/
sudo nginx -t
sudo systemctl restart nginx
langflow
langflow --host 0.0.0.0 --port 7860
EOF
)
@ -84,7 +67,7 @@ gcloud compute instances create $VM_NAME \
--zone $ZONE \
--network $VPC_NAME \
--subnet $SUBNET_NAME \
-preemptible
--preemptible
# Remove the temporary file after the VM is created
rm $tempfile

View file

@ -37,7 +37,7 @@ The script will:
1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed
2. Create a startup script to install Python, Langflow, and Nginx
3. Create a Compute Engine VM instance with the specified configuration and startup script
4. Configure Nginx to serve Langflow on TCP port 8080
4. Run Langflow to serve content on TCP port 7860
<walkthrough-pin-section-icon></walkthrough-pin-section-icon>
> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished.
@ -47,13 +47,13 @@ In the next step, you'll learn how to connect to the Langflow VM.
## Connect to the Langflow VM
To connect to your new Langflow VM, follow these steps:
1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080
1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 7860
<br>**or**
3. Run the following command to display the URL for your Langflow environment:
```bash
export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)")
echo http://$LANGFLOW_IP:8080
echo http://$LANGFLOW_IP:7860
```
4. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment
@ -76,7 +76,7 @@ However, if you decide to remove them after completing the walkthrough, you can
> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings.
```
gcloud compute firewall-rules delete allow-tcp-8080 --quiet
gcloud compute firewall-rules delete allow-tcp-7860 --quiet
gcloud compute firewall-rules delete allow-iap --quiet

View file

@ -29,7 +29,7 @@ Run the deploy_langflow_gcp_spot.sh script to configure the GCP environment and
```sh
gcloud config set project <walkthrough-project-id/>
bash ./deploy_langflow_gcp.sh
bash ./deploy_langflow_gcp_spot.sh
```
The script will:
@ -37,7 +37,7 @@ The script will:
1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed
2. Create a startup script to install Python, Langflow, and Nginx
3. Create a Compute Engine VM instance with the specified configuration and startup script
4. Configure Nginx to serve Langflow on TCP port 8080
4. Run Langflow to serve content on TCP port 7860
> <walkthrough-pin-section-icon></walkthrough-pin-section-icon> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished.
@ -46,13 +46,13 @@ In the next step, you'll learn how to connect to the Langflow VM.
## Connect to the Langflow VM
To connect to your new Langflow VM, follow these steps:
1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080
1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 7860
<br>**or**
3. Run the following command to display the URL for your Langflow environment:
```bash
export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)")
echo http://$LANGFLOW_IP:8080
echo http://$LANGFLOW_IP:7860
```
4. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment
@ -73,7 +73,7 @@ However, if you decide to remove them after completing the walkthrough, you can
> <walkthrough-pin-section-icon></walkthrough-pin-section-icon> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings.
```
gcloud compute firewall-rules delete allow-tcp-8080 --quiet
gcloud compute firewall-rules delete allow-tcp-7860 --quiet
gcloud compute firewall-rules delete allow-iap --quiet

View file

@ -1,3 +1,4 @@
from importlib.metadata import version
import logging
from fastapi import APIRouter, HTTPException
@ -33,3 +34,14 @@ async def get_load(predict_request: PredictRequest):
# Log stack trace
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
# get endpoint to return version of langflow
@router.get("/version")
def get_version():
return {"version": version("langflow")}
@router.get("/health")
def get_health():
return {"status": "OK"}

View file

@ -17,7 +17,9 @@ from langchain.agents.agent_toolkits import (
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX
from langchain.agents.agent_toolkits.pandas.prompt import SUFFIX as PANDAS_SUFFIX
from langchain.agents.agent_toolkits.pandas.prompt import (
SUFFIX_WITH_DF as PANDAS_SUFFIX,
)
from langchain.agents.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX
from langchain.agents.agent_toolkits.vectorstore.prompt import (
PREFIX as VECTORSTORE_PREFIX,

View file

@ -54,7 +54,7 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
if base_type == "agents":
return instantiate_agent(class_object, params)
elif base_type == "prompts":
return instantiate_prompt(class_object, node_type, params)
return instantiate_prompt(node_type, class_object, params)
elif base_type == "tools":
return instantiate_tool(node_type, class_object, params)
elif base_type == "toolkits":
@ -77,7 +77,7 @@ def instantiate_agent(class_object, params):
return load_agent_executor(class_object, params)
def instantiate_prompt(class_object, node_type, params):
def instantiate_prompt(node_type, class_object, params):
if node_type == "ZeroShotPrompt":
if "tools" not in params:
params["tools"] = []
@ -96,7 +96,7 @@ def instantiate_tool(node_type, class_object, params):
raise ValueError("Function should be a string")
elif node_type.lower() == "tool":
return class_object(**params)
return None # Or some other default action
return class_object(**params)
def instantiate_toolkit(node_type, class_object, params):

View file

@ -23,4 +23,4 @@ RUN chmod +x set_proxy.sh && \
USER node
RUN npm install --loglevel warn
CMD ["npm", "start"]
CMD ["npm", "run", "dev:docker"]

View file

@ -4,12 +4,12 @@
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<link rel="icon" href="/favicon.ico" />
<title>LangFlow</title>
</head>
<body id='body' style="width: 100%; height:100%">
<noscript>You need to enable JavaScript to run this app.</noscript>
<div style="width: 100vw; height:100vh" id='root'>
</div>
<div style="width: 100vw; height:100vh" id='root'></div>
<script type="module" src="/src/index.tsx"></script>
</body>
</html>

File diff suppressed because it is too large Load diff

View file

@ -11,13 +11,6 @@
"@tabler/icons-react": "^2.18.0",
"@tailwindcss/forms": "^0.5.3",
"@tailwindcss/line-clamp": "^0.4.4",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
"@types/jest": "^27.5.2",
"@types/node": "^16.18.12",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
"ace-builds": "^1.16.0",
"ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
@ -32,23 +25,20 @@
"react-laag": "^2.0.5",
"react-markdown": "^8.0.7",
"react-router-dom": "^6.8.1",
"react-scripts": "5.0.1",
"react-syntax-highlighter": "^15.5.0",
"react-syntax-highlighter": "^15.5.0",
"react-tabs": "^6.0.0",
"reactflow": "^11.5.5",
"rehype-mathjax": "^4.0.2",
"remark-gfm": "^3.0.1",
"remark-math": "^5.1.1",
"tailwindcss": "^3.2.6",
"typescript": "^4.9.5",
"uuid": "^9.0.0",
"web-vitals": "^2.1.4"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
"dev:docker": "vite --host 0.0.0.0",
"start": "vite",
"build": "vite build",
"serve": "vite preview"
},
"eslintConfig": {
"extends": [
@ -70,6 +60,21 @@
},
"proxy": "http://127.0.0.1:7860",
"devDependencies": {
"@tailwindcss/typography": "^0.5.9"
"@tailwindcss/typography": "^0.5.9",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
"@types/jest": "^27.5.2",
"@types/lodash": "^4.14.194",
"@types/node": "^16.18.12",
"@types/react": "^18.0.28",
"@types/react-dom": "^18.0.11",
"@types/uuid": "^9.0.1",
"@vitejs/plugin-react-swc": "^3.0.0",
"autoprefixer": "^10.4.14",
"postcss": "^8.4.23",
"tailwindcss": "^3.3.2",
"typescript": "^5.0.2",
"vite": "^4.3.5"
}
}
}

View file

@ -0,0 +1,6 @@
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}

View file

@ -2,6 +2,8 @@ import "reactflow/dist/style.css";
import { useState, useEffect, useContext } from "react";
import "./App.css";
import { useLocation } from "react-router-dom";
import _ from "lodash";
import ErrorAlert from "./alerts/error";
import NoticeAlert from "./alerts/notice";
import SuccessAlert from "./alerts/success";
@ -14,7 +16,6 @@ import CrashErrorComponent from "./components/CrashErrorComponent";
import { TabsContext } from "./contexts/tabsContext";
export default function App() {
var _ = require("lodash");
let { setCurrent, setShowSideBar, setIsStackedOpen } =
useContext(locationContext);

View file

@ -6,8 +6,8 @@ import {
import { useState } from "react";
import { ChatMessageType } from "../../../types/chat";
import { nodeColors } from "../../../utils";
var Convert = require("ansi-to-html");
var convert = new Convert({ newline: true });
import Convert from "ansi-to-html";
const convert = new Convert({ newline: true });
export default function ChatMessage({ chat }: { chat: ChatMessageType }) {
const [hidden, setHidden] = useState(true);

View file

@ -1,7 +1,7 @@
import { Transition } from "@headlessui/react";
import {
Bars3CenterLeftIcon,
ChatBubbleBottomCenterTextIcon,
Bars3CenterLeftIcon,
ChatBubbleBottomCenterTextIcon,
} from "@heroicons/react/24/outline";
import { nodeColors } from "../../../utils";
import { PopUpContext } from "../../../contexts/popUpContext";
@ -9,37 +9,37 @@ import { useContext } from "react";
import ChatModal from "../../../modals/chatModal";
export default function ChatTrigger({ open, setOpen }) {
const { openPopUp } = useContext(PopUpContext);
return (
<Transition
show={!open}
appear={true}
enter="transition ease-out duration-300"
enterFrom="translate-y-96"
enterTo="translate-y-0"
leave="transition ease-in duration-300"
leaveFrom="translate-y-0"
leaveTo="translate-y-96"
>
<div className="absolute bottom-2 right-3">
<div
style={{ backgroundColor: nodeColors["chat"] }}
className="border flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full dark:bg-gray-800 dark:border-gray-600 dark:text-white"
>
<button
onClick={() => {
setOpen(true);
}}
>
<div className="flex gap-3 items-center">
<ChatBubbleBottomCenterTextIcon
className="h-6 w-6 mt-1"
style={{ color: "white" }}
/>
</div>
</button>
</div>
</div>
</Transition>
);
const { openPopUp } = useContext(PopUpContext);
return (
<Transition
show={!open}
appear={true}
enter="transition ease-out duration-300"
enterFrom="translate-y-96"
enterTo="translate-y-0"
leave="transition ease-in duration-300"
leaveFrom="translate-y-0"
leaveTo="translate-y-96"
>
<div className="absolute bottom-6 right-3">
<div
style={{ backgroundColor: nodeColors["chat"] }}
className="border flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full dark:bg-gray-800 dark:border-gray-600 dark:text-white"
>
<button
onClick={() => {
setOpen(true);
}}
>
<div className="flex gap-3 items-center">
<ChatBubbleBottomCenterTextIcon
className="h-6 w-6 mt-1"
style={{ color: "white" }}
/>
</div>
</button>
</div>
</div>
</Transition>
);
}

View file

@ -4,7 +4,7 @@ import { ChatMessageType, ChatType } from "../../types/chat";
import ChatTrigger from "./chatTrigger";
import ChatModal from "../../modals/chatModal";
const _ = require("lodash");
import _ from "lodash";
export default function Chat({ flow }: ChatType) {
const [open, setOpen] = useState(false);

View file

@ -57,7 +57,7 @@ export default function CodeAreaComponent({
);
}}
>
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-blue-600" />
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-blue-600 dark:text-gray-300" />
</button>
</div>
</div>

View file

@ -3,7 +3,7 @@ import { useContext, useEffect, useState } from "react";
import { InputListComponentType } from "../../types/components";
import { TabsContext } from "../../contexts/tabsContext";
var _ = require("lodash");
import _ from "lodash";
export default function InputListComponent({
value,

View file

@ -58,7 +58,7 @@ export default function PromptAreaComponent({
);
}}
>
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-blue-600" />
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-blue-600 dark:text-gray-300" />
</button>
</div>
</div>

View file

@ -25,7 +25,7 @@ export default function TextAreaComponent({ value, onChange, disabled }:TextArea
{myValue !== "" ? myValue : 'Text empty'}
</span>
<button onClick={()=>{openPopUp(<TextAreaModal value={myValue} setValue={(t:string) => {setMyValue(t); onChange(t);}}/>)}}>
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-blue-600" />
<ArrowTopRightOnSquareIcon className="w-6 h-6 hover:text-blue-600 dark:text-gray-300" />
</button>
</div>
</div>

View file

@ -1,7 +1,7 @@
import { createContext, ReactNode, useState } from "react";
import { AlertItemType } from "../types/alerts";
var _ = require("lodash");
import _ from "lodash";
//types for alertContextType
type alertContextType = {

View file

@ -12,7 +12,7 @@ import { normalCaseToSnakeCase, updateObject, updateTemplate } from "../utils";
import { alertContext } from "./alertContext";
import { typesContext } from "./typesContext";
import { APITemplateType, TemplateVariableType } from "../types/api";
const { v4: uuidv4 } = require("uuid");
import { v4 as uuidv4 } from "uuid";
const TabsContextInitialValue: TabsContextType = {
save: () => {},

View file

@ -1,3 +1,7 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen",

View file

@ -5,6 +5,8 @@ import reportWebVitals from "./reportWebVitals";
import { BrowserRouter } from "react-router-dom";
import ContextWrapper from "./contexts";
import './index.css';
const root = ReactDOM.createRoot(
document.getElementById("root") as HTMLElement
);

View file

@ -7,7 +7,7 @@ import "ace-builds/src-noconflict/mode-python";
import "ace-builds/src-noconflict/theme-github";
import "ace-builds/src-noconflict/theme-twilight";
import "ace-builds/src-noconflict/ext-language_tools";
import "ace-builds/webpack-resolver";
// import "ace-builds/webpack-resolver";
import { darkContext } from "../../contexts/darkContext";
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
import { oneDark } from 'react-syntax-highlighter/dist/cjs/styles/prism';

View file

@ -11,10 +11,10 @@ import rehypeMathjax from "rehype-mathjax";
import remarkGfm from "remark-gfm";
import remarkMath from "remark-math";
import { CodeBlock } from "./codeBlock";
var Convert = require("ansi-to-html");
var convert = new Convert({ newline: true });
import Convert from "ansi-to-html";
export default function ChatMessage({ chat, lockChat }: { chat: ChatMessageType, lockChat: boolean }) {
const convert = new Convert({ newline: true });
const [message, setMessage] = useState("");
const imgRef = useRef(null);
useEffect(() => {
@ -36,9 +36,9 @@ export default function ChatMessage({ chat, lockChat }: { chat: ChatMessageType,
)}
>
{!chat.isSend && <div className="relative w-8 h-8">
<img className={"absolute transition-opacity duration-500 scale-150 " + (lockChat ? "opacity-100" : "opacity-0")} src={AiIcon} />
<img className={"absolute transition-opacity duration-500 scale-150 " + (lockChat ? "opacity-0" : "opacity-100")} src={AiIconStill} />
</div>}
<img className={"absolute transition-opacity duration-500 scale-150 " + (lockChat ? "opacity-100" : "opacity-0")} src={AiIcon} />
<img className={"absolute transition-opacity duration-500 scale-150 " + (lockChat ? "opacity-0" : "opacity-100")} src={AiIconStill} />
</div>}
{chat.isSend && <UserIcon className="w-6 h-6 -mb-1 text-gray-800 dark:text-gray-200" />}
</div>
{!chat.isSend ? (

View file

@ -12,7 +12,7 @@ import { sendAllProps } from "../../types/api";
import { ChatMessageType, ChatType } from "../../types/chat";
import ChatInput from "./chatInput";
const _ = require("lodash");
import _ from "lodash";
export default function ChatModal({
flow,

View file

@ -7,7 +7,7 @@ import "ace-builds/src-noconflict/mode-python";
import "ace-builds/src-noconflict/theme-github";
import "ace-builds/src-noconflict/theme-twilight";
import "ace-builds/src-noconflict/ext-language_tools";
import "ace-builds/webpack-resolver";
// import "ace-builds/webpack-resolver";
import { darkContext } from "../../contexts/darkContext";
import { checkCode } from "../../controllers/API";
import { alertContext } from "../../contexts/alertContext";

View file

@ -92,10 +92,10 @@ export default function ButtonBox({
<div className={textColor}>{icon}</div>
</div>
</div>
<div className="mt-auto mb-auto">
<div className="w-full mt-auto mb-auto">
<h3
className={classNames(
"font-semibold text-white dark:text-white/80",
"w-full font-semibold break-words text-white dark:text-white/80",
titleFontSize,
marginTop
)}

View file

@ -3,7 +3,7 @@ import { useContext, useState } from "react";
import { TabsContext } from "../../../../contexts/tabsContext";
import { FlowType } from "../../../../types/flow";
var _ = require("lodash");
import _ from "lodash";
export default function TabComponent({
selected,

View file

@ -17,6 +17,7 @@ import ReactFlow, {
OnNodesDelete,
SelectionDragHandler,
} from "reactflow";
import _ from "lodash";
import { locationContext } from "../../contexts/locationContext";
import ExtraSidebar from "./components/extraSidebarComponent";
import Chat from "../../components/chatComponent";
@ -34,7 +35,6 @@ const nodeTypes = {
genericNode: GenericNode,
};
var _ = require("lodash");
export default function FlowPage({ flow }: { flow: FlowType }) {
let { updateFlow, incrementNodeId, disableCP} =
@ -323,6 +323,7 @@ export default function FlowPage({ flow }: { flow: FlowType }) {
onDrop={onDrop}
onNodesDelete={onDelete}
selectNodesOnDrag={false}
className="theme-attribution"
>
<Background className="dark:bg-gray-900" />
<Controls className="[&>button]:text-black [&>button]:dark:bg-gray-800 hover:[&>button]:dark:bg-gray-700 [&>button]:dark:text-gray-400 [&>button]:dark:fill-gray-400 [&>button]:dark:border-gray-600">

View file

@ -19,7 +19,7 @@ import {
import { Connection, Edge, Node, ReactFlowInstance } from "reactflow";
import { FlowType } from "./types/flow";
import { APITemplateType, TemplateVariableType } from "./types/api";
var _ = require("lodash");
import _ from "lodash";
export function classNames(...classes: Array<string>) {
return classes.filter(Boolean).join(" ");

View file

@ -1,7 +1,7 @@
/** @type {import('tailwindcss').Config} */
const plugin = require("tailwindcss/plugin");
import plugin from "tailwindcss/plugin";
module.exports = {
content: ["./src/**/*.{js,ts,tsx,jsx}"],
content: ["./index.html", "./src/**/*.{js,ts,tsx,jsx}"],
darkMode: "class",
important: true,
theme: {
@ -52,32 +52,39 @@ module.exports = {
margin: 0,
},
},
'.password':{
"-webkit-text-security":"disc",
"font-family": "text-security-disc"
".password": {
"-webkit-text-security": "disc",
"font-family": "text-security-disc",
},
'.stop': {
'-webkit-animation-play-state': 'paused',
'-moz-animation-play-state': 'paused',
'animation-play-state': 'paused',
".stop": {
"-webkit-animation-play-state": "paused",
"-moz-animation-play-state": "paused",
"animation-play-state": "paused",
},
'.custom-scroll':{
'&::-webkit-scrollbar': {
'width': '8px',
".custom-scroll": {
"&::-webkit-scrollbar": {
width: "8px",
},
'&::-webkit-scrollbar-track': {
'backgroundColor': '#f1f1f1',
"&::-webkit-scrollbar-track": {
backgroundColor: "#f1f1f1",
},
'&::-webkit-scrollbar-thumb': {
'backgroundColor': '#ccc',
'borderRadius': '999px',
"&::-webkit-scrollbar-thumb": {
backgroundColor: "#ccc",
borderRadius: "999px",
},
'&::-webkit-scrollbar-thumb:hover': {
'backgroundColor': '#bbb'
}
}
})
}),require('@tailwindcss/line-clamp'),require('@tailwindcss/typography'),
"&::-webkit-scrollbar-thumb:hover": {
backgroundColor: "#bbb",
},
},
".dark .theme-attribution .react-flow__attribution": {
backgroundColor: "rgba(255, 255, 255, 0.2)",
},
".dark .theme-attribution .react-flow__attribution a": {
color: "black",
},
});
}),
require("@tailwindcss/line-clamp"),
require("@tailwindcss/typography"),
],
};

View file

@ -0,0 +1,39 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react-swc";

// Backend endpoints the dev server forwards to the Langflow API.
const apiRoutes = [
  "/all",
  "/predict",
  "^/validate/*",
  "^/chat/*",
  "/version",
  "/health",
];

// Proxy destination; override via VITE_PROXY_TARGET when the API runs elsewhere.
const target = process.env.VITE_PROXY_TARGET || "http://127.0.0.1:7860";

// Build one proxy entry per API route, all pointing at the same target.
const proxyTargets = Object.fromEntries(
  apiRoutes.map((route) => [
    route,
    {
      target: target,
      changeOrigin: true,
      secure: false,
      ws: true,
    },
  ])
);

export default defineConfig(() => ({
  build: {
    outDir: "build",
  },
  plugins: [react()],
  server: {
    port: 3000,
    proxy: {
      ...proxyTargets,
    },
  },
}));