diff --git a/.githooks/pre-commit b/.githooks/pre-commit
old mode 100644
new mode 100755
diff --git a/.github/workflows/deploy_gh-pages.yml b/.github/workflows/deploy_gh-pages.yml
new file mode 100644
index 000000000..ffb52f941
--- /dev/null
+++ b/.github/workflows/deploy_gh-pages.yml
@@ -0,0 +1,39 @@
+name: Deploy to GitHub Pages
+
+on:
+ push:
+ branches:
+ - main
+ # Review gh actions docs if you want to further define triggers, paths, etc
+ # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on
+
+jobs:
+ deploy:
+ name: Deploy to GitHub Pages
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-node@v3
+ with:
+ node-version: 18
+ cache: npm
+ cache-dependency-path: ./docs/package-lock.json
+
+ - name: Install dependencies
+ run: cd docs && npm install
+ - name: Build website
+ run: cd docs && npm run build
+
+ # Popular action to deploy to GitHub Pages:
+ # Docs: https://github.com/peaceiris/actions-gh-pages#%EF%B8%8F-docusaurus
+ - name: Deploy to GitHub Pages
+ uses: peaceiris/actions-gh-pages@v3
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ # Build output to publish to the `gh-pages` branch:
+ publish_dir: ./docs/build
+ # The following lines assign commit authorship to the official
+ # GH-Actions bot for deploys to `gh-pages` branch:
+ # https://github.com/actions/checkout/issues/13#issuecomment-724415212
+ # The GH actions bot is used by default if you didn't specify the two fields.
+ # You can swap them out with your own user credentials.
diff --git a/.gitignore b/.gitignore
index b730d67bc..9d71e19b2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+# Prevent the OpenCommit prepare-commit-msg hook from being committed
+prepare-commit-msg
# Logs
logs
*.log
@@ -242,4 +244,10 @@ dmypy.json
# Poetry
.testenv/*
langflow.db
-langchain.db
\ No newline at end of file
+
+
+.githooks/prepare-commit-msg
+.langchain.db
+
+# docusaurus
+.docusaurus/
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 000000000..82dfe1f85
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,31 @@
+# Read the Docs configuration file for Sphinx projects
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the OS, Python version and other tools you might need
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+ # You can also specify other tool versions:
+ # nodejs: "19"
+ # rust: "1.64"
+ # golang: "1.19"
+
+# Build documentation in the "docs/" directory with Sphinx
+sphinx:
+ configuration: docs/conf.py
+
+# Optionally build your docs in additional formats such as PDF and ePub
+# formats:
+# - pdf
+# - epub
+
+# Optional but recommended, declare the Python requirements required
+# to build your documentation
+# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
+# python:
+# install:
+# - requirements: docs/requirements.txt
\ No newline at end of file
diff --git a/GCP_DEPLOYMENT.md b/GCP_DEPLOYMENT.md
index 36c81e19f..e00e9b1f8 100644
--- a/GCP_DEPLOYMENT.md
+++ b/GCP_DEPLOYMENT.md
@@ -13,7 +13,7 @@ This script sets up a Debian-based VM with the Langflow package, Nginx, and the
## Spot/Preemptible Instance
-[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts.
diff --git a/Makefile b/Makefile
index bfed91f37..79e27833e 100644
--- a/Makefile
+++ b/Makefile
@@ -5,6 +5,8 @@ all: help
init:
@echo 'Installing pre-commit hooks'
git config core.hooksPath .githooks
+ @echo 'Making pre-commit hook executable'
+ chmod +x .githooks/pre-commit
@echo 'Installing backend dependencies'
make install_backend
@echo 'Installing frontend dependencies'
diff --git a/README.md b/README.md
index e53f3b535..b07ec482d 100644
--- a/README.md
+++ b/README.md
@@ -27,14 +27,17 @@
# Table of Contents
+- [โ๏ธ Langflow](#๏ธ-langflow)
+- [Table of Contents](#table-of-contents)
- [๐ฆ Installation](#-installation)
- [Locally](#locally)
+ - [HuggingFace Spaces](#huggingface-spaces)
- [๐ฅ๏ธ Command Line Interface (CLI)](#๏ธ-command-line-interface-cli)
- [Usage](#usage)
- [Environment Variables](#environment-variables)
- [Deployment](#deployment)
- - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
- - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
+ - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
+ - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
- [API Usage](#api-usage)
- [๐จ Creating Flows](#-creating-flows)
- [๐ Contributing](#-contributing)
@@ -59,6 +62,8 @@ or
langflow # or langflow --help
```
+### HuggingFace Spaces
+You can also check it out on [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow) and run it in your browser! You can even clone it and have your own copy of Langflow to play with.
# ๐ฅ๏ธ Command Line Interface (CLI)
@@ -101,7 +106,7 @@ A sample `.env` file named `.env.example` is included with the project. Copy thi
# Deployment
-### Deploy Langflow on Google Cloud Platform
+## Deploy Langflow on Google Cloud Platform
Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.
@@ -110,7 +115,7 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
-### Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve)
+## Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve)
Langflow integrates with langchain-serve to provide a one-command deployment to Jina AI Cloud.
@@ -217,8 +222,15 @@ print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
> Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository.
+## Deploy on Railway
+[](https://railway.app/template/Emy2sU?referralCode=MnPSdg)
-## ๐จ Creating Flows
+## Deploy on Render
+
+
+
+
+# ๐จ Creating Flows
Creating flows with Langflow is easy. Simply drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://langchain.readthedocs.io/en/latest/reference.html) to choose from, including LLMs, prompt serializers, agents, and chains.
@@ -237,7 +249,7 @@ flow("Hey, have you heard of Langflow?")
```
-## ๐ Contributing
+# ๐ Contributing
We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible.
@@ -250,6 +262,6 @@ Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask question
[](https://star-history.com/#logspace-ai/langflow&Date)
-## ๐ License
+# ๐ License
Langflow is released under the MIT License. See the LICENSE file for details.
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 000000000..aaba2fa1e
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,41 @@
+# Website
+
+This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
+
+### Installation
+
+```
+$ yarn
+```
+
+### Local Development
+
+```
+$ yarn start
+```
+
+This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
+
+### Build
+
+```
+$ yarn build
+```
+
+This command generates static content into the `build` directory and can be served using any static contents hosting service.
+
+### Deployment
+
+Using SSH:
+
+```
+$ USE_SSH=true yarn deploy
+```
+
+Not using SSH:
+
+```
+$ GIT_USER= yarn deploy
+```
+
+If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
diff --git a/docs/babel.config.js b/docs/babel.config.js
new file mode 100644
index 000000000..e00595dae
--- /dev/null
+++ b/docs/babel.config.js
@@ -0,0 +1,3 @@
+module.exports = {
+ presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
+};
diff --git a/docs/docs/components/agents.mdx b/docs/docs/components/agents.mdx
new file mode 100644
index 000000000..70d6dff91
--- /dev/null
+++ b/docs/docs/components/agents.mdx
@@ -0,0 +1,2 @@
+# Agents
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/chains.mdx b/docs/docs/components/chains.mdx
new file mode 100644
index 000000000..997066cbd
--- /dev/null
+++ b/docs/docs/components/chains.mdx
@@ -0,0 +1,2 @@
+# Chains
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/embeddings.mdx b/docs/docs/components/embeddings.mdx
new file mode 100644
index 000000000..c4477db7c
--- /dev/null
+++ b/docs/docs/components/embeddings.mdx
@@ -0,0 +1,2 @@
+# Embeddings
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/llms.mdx b/docs/docs/components/llms.mdx
new file mode 100644
index 000000000..b6a16aa6c
--- /dev/null
+++ b/docs/docs/components/llms.mdx
@@ -0,0 +1,2 @@
+# LLMs
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/loaders.mdx b/docs/docs/components/loaders.mdx
new file mode 100644
index 000000000..8e9289875
--- /dev/null
+++ b/docs/docs/components/loaders.mdx
@@ -0,0 +1,2 @@
+# Loaders
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/memories.mdx b/docs/docs/components/memories.mdx
new file mode 100644
index 000000000..6036ddf46
--- /dev/null
+++ b/docs/docs/components/memories.mdx
@@ -0,0 +1,2 @@
+# Memories
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/prompts.mdx b/docs/docs/components/prompts.mdx
new file mode 100644
index 000000000..d6d702f21
--- /dev/null
+++ b/docs/docs/components/prompts.mdx
@@ -0,0 +1,2 @@
+# Prompts
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/text-splitters.mdx b/docs/docs/components/text-splitters.mdx
new file mode 100644
index 000000000..515271233
--- /dev/null
+++ b/docs/docs/components/text-splitters.mdx
@@ -0,0 +1,2 @@
+# Text Splitters
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/toolkits.mdx b/docs/docs/components/toolkits.mdx
new file mode 100644
index 000000000..b7c8bb7f9
--- /dev/null
+++ b/docs/docs/components/toolkits.mdx
@@ -0,0 +1,2 @@
+# Toolkits
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/tools.mdx b/docs/docs/components/tools.mdx
new file mode 100644
index 000000000..9c6538280
--- /dev/null
+++ b/docs/docs/components/tools.mdx
@@ -0,0 +1,2 @@
+# Tools
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/utilities.mdx b/docs/docs/components/utilities.mdx
new file mode 100644
index 000000000..a25048286
--- /dev/null
+++ b/docs/docs/components/utilities.mdx
@@ -0,0 +1,2 @@
+# Utilities
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/vector-stores.mdx b/docs/docs/components/vector-stores.mdx
new file mode 100644
index 000000000..221a89bcc
--- /dev/null
+++ b/docs/docs/components/vector-stores.mdx
@@ -0,0 +1,2 @@
+# Vector Stores
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/components/wrappers.mdx b/docs/docs/components/wrappers.mdx
new file mode 100644
index 000000000..7abde7a69
--- /dev/null
+++ b/docs/docs/components/wrappers.mdx
@@ -0,0 +1,2 @@
+# Wrappers
+(coming soon)
\ No newline at end of file
diff --git a/docs/docs/contributing/community.md b/docs/docs/contributing/community.md
new file mode 100644
index 000000000..fb18b1172
--- /dev/null
+++ b/docs/docs/contributing/community.md
@@ -0,0 +1,38 @@
+# Community
+
+## ๐ค Join **Langflow** Discord server
+
+ Join us to ask questions and showcase your projects.
+
+ Let's bring together the building blocks of AI integration!
+
+ Langflow [Discord](https://discord.gg/EqksyE2EX9) server.
+
+---
+
+## ๐ฆ Stay tuned for **Langflow** on Twitter
+
+Follow [@logspace_ai](https://twitter.com/logspace_ai) on **Twitter** to get the latest news about **Langflow**.
+
+---
+## โญ๏ธ Star **Langflow** on GitHub
+
+You can "star" **Langflow** in [GitHub](https://github.com/logspace-ai/langflow).
+
+By adding a star, other users will be able to find it more easily and see that it has been already useful for others.
+
+---
+
+## ๐ Watch the GitHub repository for releases
+
+You can "watch" **Langflow** in [GitHub](https://github.com/logspace-ai/langflow).
+
+
+If you select "Watching" instead of "Releases only" you will receive notifications when someone creates a new issue or question. You can also specify that you only want to be notified about new issues, discussions, PRs, etc.
+
+
+Then you can try and help them solve those questions.
+
+---
+
+Thanks! ๐
\ No newline at end of file
diff --git a/docs/docs/contributing/github-issues.md b/docs/docs/contributing/github-issues.md
new file mode 100644
index 000000000..41cc674e1
--- /dev/null
+++ b/docs/docs/contributing/github-issues.md
@@ -0,0 +1,27 @@
+# GitHub Issues
+
+Our [issues](https://github.com/logspace-ai/langflow/issues) page is kept up to date
+with bugs, improvements, and feature requests. There is a taxonomy of labels to help
+with sorting and discovery of issues of interest.
+
+If you're looking for help with your code, consider posting a question on the
+[GitHub Discussions board](https://github.com/logspace-ai/langflow/discussions). Please
+understand that we won't be able to provide individual support via email. We
+also believe that help is much more valuable if it's **shared publicly**,
+so that more people can benefit from it.
+
+- **Describing your issue:** Try to provide as many details as possible. What
+ exactly goes wrong? _How_ is it failing? Is there an error?
+ "XY doesn't work" usually isn't that helpful for tracking down problems. Always
+ remember to include the code you ran and if possible, extract only the relevant
+ parts and don't just dump your entire script. This will make it easier for us to
+ reproduce the error.
+
+- **Sharing long blocks of code or logs:** If you need to include long code,
+  logs or tracebacks, you can wrap them in `<details>` and `</details>`. This
+ [collapses the content](https://developer.mozilla.org/en/docs/Web/HTML/Element/details) so it only becomes visible on click, making the issue easier to read and follow.
+
+
+## Issue labels
+
+[See this page](https://github.com/logspace-ai/langflow/labels) for an overview of the system we use to tag our issues and pull requests.
\ No newline at end of file
diff --git a/docs/docs/contributing/how-contribute.md b/docs/docs/contributing/how-contribute.md
new file mode 100644
index 000000000..cdccc271f
--- /dev/null
+++ b/docs/docs/contributing/how-contribute.md
@@ -0,0 +1,62 @@
+# How to contribute?
+
+๐ Hello there! We welcome contributions from developers of all levels to our open-source project on [GitHub](https://github.com/logspace-ai/langflow). If you'd like to contribute, please check our contributing guidelines and help make Langflow more accessible.
+
+As an open-source project in a rapidly developing field, we are extremely open
+to contributions, whether in the form of a new feature, improved infra, or better documentation.
+
+To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow.
+
+Please do not try to push directly to this repo unless you are a maintainer.
+
+---
+## Local development
+
+You can develop Langflow using docker compose, or locally.
+
+We provide a .vscode/launch.json file for debugging the backend in VSCode, which is a lot faster than using docker compose.
+
+Setting up hooks:
+```bash
+make init
+```
+
+This will install the pre-commit hooks, which will run `make format` on every commit.
+
+It is advised to run `make lint` before pushing to the repository.
+
+---
+
+## Run locally
+
+Langflow can run locally by cloning the repository and installing the dependencies. We recommend using a virtual environment to isolate the dependencies from your system.
+
+Before you start, make sure you have the following installed:
+
+- Poetry (>=1.4)
+- Node.js
+
+Then install the dependencies and start the development server for the backend:
+
+```bash
+make install_backend
+make backend
+```
+
+And the frontend:
+
+```bash
+make frontend
+```
+
+---
+
+## Docker compose
+
+The following snippet will run the backend and frontend in separate containers. The frontend will be available at `localhost:3000` and the backend at `localhost:7860`.
+
+```bash
+docker compose up --build
+# or
+make dev build=1
+```
\ No newline at end of file
diff --git a/docs/docs/deployment/gcp-deployment.md b/docs/docs/deployment/gcp-deployment.md
new file mode 100644
index 000000000..771550f24
--- /dev/null
+++ b/docs/docs/deployment/gcp-deployment.md
@@ -0,0 +1,35 @@
+# Deploy on Google Cloud Platform
+
+## Run Langflow from a New Google Cloud Project
+
+This guide will help you set up a Langflow development VM in a Google Cloud Platform project using Google Cloud Shell.
+
+:::note
+When Cloud Shell opens, be sure to select **Trust repo**. Some `gcloud` commands might not run in an ephemeral Cloud Shell environment.
+:::
+
+
+## Standard VM
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
+
+This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment.
+
+---
+
+## Spot/Preemptible Instance
+
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
+
+When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts.
+
+---
+
+## Pricing (approximate)
+> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator)
+
+
+| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
+| -------------- | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | ----- |
+| 100 GB Disk | - | $10/month | - | $10/month | Disk cost remains the same for both regular and Spot/Preemptible VMs |
+| VM (n1-standard-4) | $0.15/hr | ~$108/month | ~$0.04/hr | ~$29/month | The VM cost can be significantly reduced using a Spot/Preemptible instance |
+| **Total** | **$0.15/hr** | **~$118/month** | **~$0.04/hr** | **~$39/month** | Total costs for running the VM and disk 24/7 for an entire month |
diff --git a/docs/docs/deployment/jina-deployment.md b/docs/docs/deployment/jina-deployment.md
new file mode 100644
index 000000000..bf9df051d
--- /dev/null
+++ b/docs/docs/deployment/jina-deployment.md
@@ -0,0 +1,101 @@
+# Deploy on Jina AI Cloud
+
+Langflow integrates with langchain-serve to provide a one-command deployment to [Jina AI Cloud](https://github.com/jina-ai/langchain-serve).
+
+Start by installing `langchain-serve` with
+
+```bash
+pip install -U langchain-serve
+```
+
+Then, run:
+
+```bash
+langflow --jcloud
+```
+
+```text
+๐ Langflow server successfully deployed on Jina AI Cloud ๐
+๐ Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://.wolf.jina.ai/
+๐ Read more about managing the server: https://github.com/jina-ai/langchain-serve
+```
+
+**Complete (example) output:**
+
+```text
+ ๐ Deploying Langflow server on Jina AI Cloud
+ โญโโโโโโโโโโโโโโโโโโโโโโโโโ ๐ Flow is available! โโโโโโโโโโโโโโโโโโโโโโโโโโโฎ
+ โ โ
+ โ ID langflow-e3dd8820ec โ
+ โ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai โ
+ โ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec โ
+ โ โ
+ โฐโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฏ
+ โญโโโโโโโโโโโโโโโฌโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฎ
+ โ App ID โ langflow-e3dd8820ec โ
+ โโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโค
+ โ Phase โ Serving โ
+ โโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโค
+ โ Endpoint โ wss://langflow-e3dd8820ec.wolf.jina.ai โ
+ โโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโค
+ โ App logs โ dashboards.wolf.jina.ai โ
+ โโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโค
+ โ Swagger UI โ https://langflow-e3dd8820ec.wolf.jina.ai/docs โ
+ โโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโค
+ โ OpenAPI JSON โ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json โ
+ โฐโโโโโโโโโโโโโโโดโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฏ
+
+ ๐ Langflow server successfully deployed on Jina AI Cloud ๐
+ ๐ Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/
+ ๐ Read more about managing the server: https://github.com/jina-ai/langchain-serve
+ ```
+## API Usage (with python)
+
+You can use Langflow directly on your browser or the API endpoints on Jina AI Cloud to interact with the server.
+
+```python
+import requests
+
+BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict"
+FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468"
+# You can tweak the flow by adding a tweaks dictionary
+# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
+TWEAKS = {
+"ChatOpenAI-g4jEr": {},
+"ConversationChain-UidfJ": {}
+}
+
+def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict:
+ """
+ Run a flow with a given message and optional tweaks.
+
+ :param message: The message to send to the flow
+ :param flow_id: The ID of the flow to run
+ :param tweaks: Optional tweaks to customize the flow
+ :return: The JSON response from the flow
+ """
+ api_url = f"{BASE_API_URL}/{flow_id}"
+
+ payload = {"message": message}
+
+ if tweaks:
+ payload["tweaks"] = tweaks
+
+ response = requests.post(api_url, json=payload)
+ return response.json()
+
+# Setup any tweaks you want to apply to the flow
+print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
+ ```
+
+ ```json
+{
+ "result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!"
+}
+ ```
+
+:::info
+
+Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository.
+
+:::
\ No newline at end of file
diff --git a/docs/docs/examples/buffer-memory.mdx b/docs/docs/examples/buffer-memory.mdx
new file mode 100644
index 000000000..c3e886cf9
--- /dev/null
+++ b/docs/docs/examples/buffer-memory.mdx
@@ -0,0 +1,25 @@
+# Buffer Memory
+
+For certain applications, retaining past interactions is crucial. For that, chains and agents may accept a memory component as one of their input parameters. The `ConversationBufferMemory` component is one of them. It stores messages and extracts them into variables.
+
+## โ๏ธ Langflow Example
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
+
+#### Download Flow
+
+:::note LangChain Components ๐ฆ๐
+
+- [`ConversationBufferMemory`](https://python.langchain.com/docs/modules/memory/how_to/buffer)
+- [`ConversationChain`](https://python.langchain.com/docs/modules/chains/)
+- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai)
+ :::
diff --git a/docs/docs/examples/conversation-chain.mdx b/docs/docs/examples/conversation-chain.mdx
new file mode 100644
index 000000000..b8cbb11bb
--- /dev/null
+++ b/docs/docs/examples/conversation-chain.mdx
@@ -0,0 +1,28 @@
+# Conversation Chain
+
+This example shows how to instantiate a simple `ConversationChain` component using a Language Model (LLM). Once the Node Status turns green ๐ข, the chat will be ready to take in user messages. Here, we used `ChatOpenAI` to act as the required LLM input, but you can use any LLM for this purpose.
+
+:::info
+Make sure to always get the API key from the provider.
+:::
+
+## โ๏ธ Langflow Example
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
+
+#### Download Flow
+
+:::note LangChain Components ๐ฆ๐
+
+- [`ConversationChain`](https://python.langchain.com/docs/modules/chains/)
+- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai)
+ :::
diff --git a/docs/docs/examples/csv-loader.mdx b/docs/docs/examples/csv-loader.mdx
new file mode 100644
index 000000000..de808ec3d
--- /dev/null
+++ b/docs/docs/examples/csv-loader.mdx
@@ -0,0 +1,42 @@
+# CSV Loader
+
+The `VectorStoreAgent` component retrieves information from one or more vector stores. This example shows a `VectorStoreAgent` connected to a CSV file through the `Chroma` vector store. Process description:
+
+- The `CSVLoader` loads a CSV file into a list of documents.
+- The extracted data is then processed by the `CharacterTextSplitter`, which splits the text into small, meaningful chunks (usually sentences).
+- These chunks feed the `Chroma` vector store, which converts them into vectors and stores them for fast indexing.
+- Finally, the agent accesses the information of the vector store through the `VectorStoreInfo` tool.
+
+:::info
+The vector store is used for efficient semantic search, while `VectorStoreInfo` carries information about it, such as its name and description. Embeddings are a way to represent words, phrases, or any entities in a vector space. Learn more about them [here](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings).
+:::
+
+:::tip
+Once you build this flow, ask questions about the data in the chat interface (e.g., number of rows or columns).
+:::
+
+## โ๏ธ Langflow Example
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
+
+#### Download Flow
+
+:::note LangChain Components ๐ฆ๐
+
+- [`CSVLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv)
+- [`CharacterTextSplitter`](https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter)
+- [`OpenAIEmbedding`](https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai)
+- [`Chroma`](https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma)
+- [`VectorStoreInfo`](https://python.langchain.com/docs/modules/data_connection/vectorstores/)
+- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
+- [`VectorStoreAgent`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore)
+ :::
diff --git a/docs/docs/examples/how-upload-examples.mdx b/docs/docs/examples/how-upload-examples.mdx
new file mode 100644
index 000000000..8a4306212
--- /dev/null
+++ b/docs/docs/examples/how-upload-examples.mdx
@@ -0,0 +1,29 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+# ๐ How to Upload Examples?
+
+We welcome all examples that can help our community learn and explore Langflow's capabilities.
+Langflow Examples is a repository on [GitHub](https://github.com/logspace-ai/langflow_examples) that contains examples of flows that people can use for inspiration and learning.
+
+
+
+
+
+To upload examples, please follow these steps:
+
+1. **Create a Flow:** First, create a flow using Langflow. You can use any of the available templates or create a new flow from scratch.
+
+2. **Export the Flow:** Once you have created a flow, export it as a JSON file. Make sure to give your file a descriptive name and include a brief description of what it does.
+
+3. **Submit a Pull Request:** Finally, submit a pull request (PR) to the examples repo. Make sure to include your JSON file in the PR.
+
+If your example uses any third-party libraries or packages, please include them in your PR and make sure that your example follows the [**โ๏ธ Langflow Code Of Conduct**](https://github.com/logspace-ai/langflow/blob/dev/CODE_OF_CONDUCT.md).
diff --git a/docs/docs/examples/midjourney-prompt-chain.mdx b/docs/docs/examples/midjourney-prompt-chain.mdx
new file mode 100644
index 000000000..d3ca57c91
--- /dev/null
+++ b/docs/docs/examples/midjourney-prompt-chain.mdx
@@ -0,0 +1,40 @@
+# MidJourney Prompt Chain
+
+The `MidJourneyPromptChain` can be used to generate imaginative and detailed MidJourney prompts.
+
+For example, type something like:
+
+```bash
+Dragon
+```
+
+And get a response such as:
+
+```text
+Imagine a mysterious forest, the trees are tall and ancient, their branches reaching up to the sky. Through the darkness, a dragon emerges from the shadows, its scales shimmering in the moonlight. Its wingspan is immense, and its eyes glow with a fierce intensity. It is a majestic and powerful creature, one that commands both respect and fear.
+```
+
+:::tip
+Notice that the `ConversationSummaryMemory` stores a summary of the conversation over time. Try using it to create better prompts as the conversation goes on.
+:::
+
+## ⛓️ Langflow Example
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
+
+#### Download Flow
+
+:::note LangChain Components 🦜🔗
+
+- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
+- [`ConversationSummaryMemory`](https://python.langchain.com/docs/modules/memory/how_to/summary)
+ :::
diff --git a/docs/docs/examples/multiple-vectorstores.mdx b/docs/docs/examples/multiple-vectorstores.mdx
new file mode 100644
index 000000000..36890c866
--- /dev/null
+++ b/docs/docs/examples/multiple-vectorstores.mdx
@@ -0,0 +1,52 @@
+# Multiple Vector Stores
+
+The example below shows an agent operating with two vector stores built upon different data sources.
+
+The `TextLoader` loads a TXT file, while the `WebBaseLoader` pulls text from webpages into a document format to be accessed downstream. The `Chroma` vector stores are created analogously to what we have demonstrated in our [CSV Loader](/examples/csv-loader.mdx) example. Finally, the `VectorStoreRouterAgent` constructs an agent that routes between the vector stores.
+
+:::info
+Get the TXT file used [here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt).
+:::
+
+URL used by the `WebBaseLoader`:
+
+```txt
+https://pt.wikipedia.org/wiki/Harry_Potter
+```
+
+:::tip
+When you build the flow, request information about one of the sources. The agent should be able to use the correct source to generate a response.
+:::
+
+:::info
+Learn more about Multiple Vector Stores [here](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore?highlight=Multiple%20Vector%20Stores#multiple-vectorstores).
+:::
+
+## ⛓️ Langflow Example
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
+
+#### Download Flow
+
+:::note LangChain Components 🦜🔗
+
+- [`WebBaseLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base)
+- [`TextLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/unstructured_file)
+- [`CharacterTextSplitter`](https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter)
+- [`OpenAIEmbedding`](https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai)
+- [`Chroma`](https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma)
+- [`VectorStoreInfo`](https://python.langchain.com/docs/modules/data_connection/vectorstores/)
+- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
+- [`VectorStoreRouterToolkit`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore)
+- [`VectorStoreRouterAgent`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore)
+
+:::
diff --git a/docs/docs/examples/python-function.mdx b/docs/docs/examples/python-function.mdx
new file mode 100644
index 000000000..12a262a3f
--- /dev/null
+++ b/docs/docs/examples/python-function.mdx
@@ -0,0 +1,48 @@
+# Python Function
+
+Langflow allows you to create a customized tool using the `PythonFunction` connected to a `Tool` component. In this example, Regex is used in Python to validate a pattern.
+
+```python
+import re
+
+def is_brazilian_zipcode(zipcode: str) -> bool:
+ pattern = r"\d{5}-?\d{3}"
+
+ # Check if the zip code matches the pattern
+ if re.match(pattern, zipcode):
+ return True
+
+ return False
+```
+
+:::tip
+When a tool is called, it is often desirable to have its output returned directly to the user. You can do this by setting the **return_direct** flag for a tool to be True.
+:::
+
+The `AgentInitializer` component is a quick way to construct an agent from the model and tools.
+
+:::info
+The `PythonFunction` is a custom component that uses the LangChain 🦜🔗 tool decorator. Learn more about it [here](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools).
+:::
+
+## ⛓️ Langflow Example
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
+
+#### Download Flow
+
+:::note LangChain Components 🦜🔗
+
+- [`PythonFunctionTool`](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools)
+- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai)
+- [`AgentInitializer`](https://python.langchain.com/docs/modules/agents/)
+ :::
diff --git a/docs/docs/examples/serp-api-tool.mdx b/docs/docs/examples/serp-api-tool.mdx
new file mode 100644
index 000000000..a7e1d3d8e
--- /dev/null
+++ b/docs/docs/examples/serp-api-tool.mdx
@@ -0,0 +1,45 @@
+# Serp API Tool
+
+The [Serp API](https://serpapi.com/) (Search Engine Results Page) allows developers to scrape results from search engines such as Google, Bing, and Yahoo, and can be used in Langflow through the `Search` component.
+
+:::info
+To use the Serp API, you first need to sign up for an API key on the [Serp API](https://serpapi.com/) website.
+:::
+
+Here, the `ZeroShotPrompt` component specifies a prompt template for the `ZeroShotAgent`. Set a _Prefix_ and _Suffix_ with rules for the agent to obey. In the example, we used default templates.
+
+The `LLMChain` is a simple chain that takes in a prompt template, formats it with the user input, and returns the response from an LLM.
+
+:::tip
+In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the LLM, but feel free to experiment with other Language Models!
+:::
+
+The `ZeroShotAgent` takes the `LLMChain` and the `Search` tool as inputs, using the tool to find information when necessary.
+
+:::info
+Learn more about the Serp API [here](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi).
+:::
+
+## ⛓️ Langflow Example
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
+
+#### Download Flow
+
+:::note LangChain Components 🦜🔗
+
+- [`ZeroShotPrompt`](https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/)
+- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai)
+- [`LLMChain`](https://python.langchain.com/docs/modules/chains/foundational/llm_chain)
+- [`Search`](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi)
+- [`ZeroShotAgent`](https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent)
+ :::
diff --git a/docs/docs/getting-started/creating-flows.mdx b/docs/docs/getting-started/creating-flows.mdx
new file mode 100644
index 000000000..b09951f42
--- /dev/null
+++ b/docs/docs/getting-started/creating-flows.mdx
@@ -0,0 +1,37 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# 🎨 Creating Flows
+
+## Compose
+
+Creating flows with Langflow is easy. Drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://python.langchain.com/docs/modules/) to choose from, including LLMs, prompt serializers, agents, and chains.
+
+
+
+## Fork
+
+The easiest way to start with Langflow is by forking a **community example**. Forking an example stores a copy in your project collection, allowing you to edit and save the modified version as a new flow.
+
+
+
+
+
+## Build
+
+Building a flow means validating if the components have prerequisites fulfilled and are properly instantiated. When a chat message is sent, the flow will run for the first time, executing the pipeline.
+
+
+
+
diff --git a/docs/docs/getting-started/hugging-face-spaces.mdx b/docs/docs/getting-started/hugging-face-spaces.mdx
new file mode 100644
index 000000000..e8b3852a9
--- /dev/null
+++ b/docs/docs/getting-started/hugging-face-spaces.mdx
@@ -0,0 +1,20 @@
+# 🤗 HuggingFace Spaces
+
+A fully featured version of Langflow can be accessed via HuggingFace spaces with no installation required.
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
+
+
+
+Check out Langflow on [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow).
diff --git a/docs/docs/getting-started/installation.md b/docs/docs/getting-started/installation.md
new file mode 100644
index 000000000..c3ad54239
--- /dev/null
+++ b/docs/docs/getting-started/installation.md
@@ -0,0 +1,15 @@
+# 📦 How to install?
+
+## Installation
+
+You can install Langflow from pip:
+
+```bash
+pip install langflow
+```
+
+Next, run:
+
+```bash
+langflow
+```
\ No newline at end of file
diff --git a/docs/docs/guidelines/chat-interface.mdx b/docs/docs/guidelines/chat-interface.mdx
new file mode 100644
index 000000000..c09f00076
--- /dev/null
+++ b/docs/docs/guidelines/chat-interface.mdx
@@ -0,0 +1,64 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# Chat Interface
+
+Langflow's chat interface provides a user-friendly experience and functionality to interact with the model and customize the prompt. The sidebar brings options that allow users to view and edit pre-defined prompt variables. This feature facilitates quick experimentation by enabling the modification of variable values right in the chat.
+
+
+
+
+
+
+Notice that editing variables in the chat interface takes effect temporarily and won't change their original value in the components once the chat is closed.
+
+
+
+
+
+To view the complete prompt in its original, structured format, click the "Display Prompt" option. This feature lets you see the prompt exactly as it entered the model.
+
+
+
+
+
+
+In the chat interface, you can redefine which variable should be interpreted as the chat input. This gives you control over these inputs and allows dynamic and creative interactions.
+
+
+
+
diff --git a/docs/docs/guidelines/collection.mdx b/docs/docs/guidelines/collection.mdx
new file mode 100644
index 000000000..c0616b2e2
--- /dev/null
+++ b/docs/docs/guidelines/collection.mdx
@@ -0,0 +1,13 @@
+import ThemedImage from '@theme/ThemedImage';
+import useBaseUrl from '@docusaurus/useBaseUrl';
+import ZoomableImage from '/src/theme/ZoomableImage.js';
+import ReactPlayer from 'react-player';
+
+# Collection
+
+A collection is a snapshot of the flows available in the database. You can download your entire collection for local storage and upload it anytime for future use.
+
+
+
+
diff --git a/docs/docs/guidelines/components.mdx b/docs/docs/guidelines/components.mdx
new file mode 100644
index 000000000..ba2f5ff33
--- /dev/null
+++ b/docs/docs/guidelines/components.mdx
@@ -0,0 +1,59 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# Component
+
+Components are the building blocks of the flows. They are made of inputs, outputs, and parameters that define their functionality, providing a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work in the LangChain [documentation](https://docs.langchain.com/docs/category/components) section.
+
+### Component's Features
+
+
+ During the flow creation process, you will notice handles (colored circles)
+ attached to one or both sides of a component. These handles represent the
+ availability to connect to other components, while their colors are type hints
+ (hover over a handle to see connection details).
+
+
+
+ For example, if you select a ConversationChain component, you
+ will see orange o and purple{" "}
+ o input handles. They indicate that
+ this component accepts an LLM and a Memory component as inputs. The red
+ asterisk * means that at least one input
+ of that type is required.
+
+
+
+
+
+
+
+ On the top right corner, you will find the component status icon 🔴. Make the
+ necessary connections, build the flow (⚡ zap icon on the bottom right of the
+ canvas) and once the validation is completed, the status of each validated
+ component should light green 🟢. Hover over the component status to reveal the
+ outputs going through it in case of success, or the detected error in case of
+ failure.
+
+
+---
+
+### Component's Parameters
+
+Langflow components can be edited in the component settings button. Hide parameters to reduce complexity and keep the canvas clean and intuitive for experimentation.
+
+
+
+
diff --git a/docs/docs/guidelines/features.mdx b/docs/docs/guidelines/features.mdx
new file mode 100644
index 000000000..cf8b09c6e
--- /dev/null
+++ b/docs/docs/guidelines/features.mdx
@@ -0,0 +1,69 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# Features
+
+
+ When you click for New Project, you will see on the top left corner of the
+ screen, some options such as Import, Export,{" "}
+ Code and Save, as displayed in the image
+ below:
+
+
+
+
+
+
+
+ Further down, we will explain each of these options.
+
+
+---
+
+### Import and Export
+
+Flows can be exported and imported as JSON files.
+
+:::caution
+Watch out for API keys being stored in local files.
+:::
+
+---
+
+### Code
+
+The Code button shows snippets to use your flow as a Python object or an API.
+
+**Python Code**
+
+Through the Langflow package, you can load a flow from a JSON file and use it as a LangChain object.
+
+```py
+from langflow import load_flow_from_json
+
+flow = load_flow_from_json("path/to/flow.json")
+# Now you can use it like any chain
+flow("Hey, have you heard of Langflow?")
+```
+
+**API**
+
+Once you save a flow, the API endpoint is created with your latest changes. Click the "code" button to use that flow as an API. You can post-adjust component parameters using the global variable TWEAKS.
+
+The example below shows a Python script making a POST request to a local API endpoint, which gets a prediction based on the message input.
+
+
+
+
diff --git a/docs/docs/guidelines/prompt-customization.mdx b/docs/docs/guidelines/prompt-customization.mdx
new file mode 100644
index 000000000..8e2f409f9
--- /dev/null
+++ b/docs/docs/guidelines/prompt-customization.mdx
@@ -0,0 +1,86 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# Prompt Customization
+
+The prompt template allows users to create prompts and define variables that provide control over instructing the model.
+
+
+
+
+
+
+Variables can be used to define instructions, questions, context, inputs, or examples for the model and can be created with any chosen name in curly brackets, e.g., `{variable_name}`. They act as placeholders for parts of the text that can be easily modified.
+
+
+
+
+
+
+Once inserted, these variables are immediately recognized as new fields in the prompt component. Here, you can define their values within the component itself or leave a field empty to be adjusted over the chat interface.
+
+
+
+
+
+You can also use documents or output parsers as prompt variables. By plugging them into prompt handles, theyโll disable and feed that input field.
+
+
+
+
+
+
+
+With this, users can interact with documents, webpages, or any other type of content directly from the prompt, which allows for seamless integration of external resources with the language model.
+
+
+
+If working with an interactive (chat-like) flow, remember to keep one of the input variables empty to behave as the chat input.
+
+
+
+
+
+
diff --git a/docs/docs/guides/chatprompttemplate_guide.mdx b/docs/docs/guides/chatprompttemplate_guide.mdx
new file mode 100644
index 000000000..422bb6420
--- /dev/null
+++ b/docs/docs/guides/chatprompttemplate_guide.mdx
@@ -0,0 +1,78 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# Building chatbots with System Message
+
+## Overview
+
+In this guide, we will modify the "Basic Chat with Prompt and History" example, integrating the ChatPromptTemplate with the SystemMessagePromptTemplate and HumanMessagePromptTemplate components. By following these steps, you'll be able to build a personalized chatbot that can interpret and respond based on user-defined System messages.
+
+## Interactive Guide
+
+
+
+## Step-by-Step Instructions
+
+1. Navigate to the "Community Examples" section.
+
+2. Locate the "Basic Chat with Prompt and History" example, and click on "Fork Example".
+
+3. Once in the editor, find the "PromptTemplate" component and remove it.
+
+4. Now, add these three components: ChatPromptTemplate, SystemMessagePromptTemplate, and HumanMessagePromptTemplate.
+
+> **Note:** Remember to set the model to gpt-3.5-turbo-0613 or the most up-to-date version. The latest models have improved capabilities to comprehend System messages.
+
+5. Open the "Prompt" field on the SystemMessagePromptTemplate component.
+
+6. Enter the text: `You are a {role} that {behavior}.`
+
+7. Save your changes by clicking on "Check & Save".
+
+8. Define the 'role' variable by typing "obedient assistant".
+
+9. Next, navigate to the HumanMessagePromptTemplate and open the "Prompt" field.
+
+10. Here, simply enter `{input}`.
+
+11. Save these changes by clicking on "Check & Save".
+
+12. Now, you should see your flow populated with the variables you defined.
+
+13. In the Memory component, set the 'Input Key' to "input".
+
+> **Tip:** When using a Memory component with multiple variables, it's crucial to specify which variable should be used to generate the conversation history.
+
+14. Click on the "Build" button to implement your changes.
+
+15. Open the chat interface to test your modifications.
+
+16. You should now be able to see and use the defined variables in the chat interface.
+
+17. Click on 'role' to examine the variable you established in the canvas.
+
+18. Now, let's define the 'behavior' variable.
+
+19. Enter the text: "writes the word 'Langflow' at the end of every sentence."
+
+20. Test your chatbot by typing "How can you help me?"
+
+21. If everything was set up correctly, your chatbot should respond appropriately, following the defined behavior.
+
+22. Congratulations! You have successfully customized and built your chatbot.
+
+By following these instructions, you have created a dynamic chatbot capable of understanding and responding based on custom system messages, enhancing the user experience and interaction. Enjoy your personalized assistant!
diff --git a/docs/docs/guides/loading_document.mdx b/docs/docs/guides/loading_document.mdx
new file mode 100644
index 000000000..d760e9124
--- /dev/null
+++ b/docs/docs/guides/loading_document.mdx
@@ -0,0 +1,64 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# Integrating documents with prompt variables
+
+## Overview
+
+This guide takes you through the process of augmenting the "Basic Chat with Prompt and History" example. You'll learn how to embed documents as context into the PromptTemplate component utilizing a WebBaseLoader.
+
+## Interactive Guide
+
+
+
+## Step-by-Step Instructions
+
+1. Start by navigating to the "Community Examples" section.
+
+2. Find the "Basic Chat with Prompt and History" example and click on "Fork Example".
+
+3. In the editor, open the "Template" field.
+
+4. Here, introduce the `{context}` variable, placing it somewhere before the "Current conversation:" text.
+
+5. Once done, save your changes by clicking on "Check & Save".
+
+6. Next, open the search bar and type "web".
+
+7. Drag and drop a WebBaseLoader (or any other loader of your choice) onto the canvas.
+
+8. Connect this loader to the `{context}` variable that we just added.
+
+9. In the "Web Page" field, enter "https://langflow.org/how-upload-examples".
+
+10. Now, click on "ConversationBufferMemory".
+
+11. In the "Input Key" field, enter "text" to define the Chat variable.
+
+> **Tip:** When defining more than one variable and using a Memory component, it's crucial to specify which variable should be used to create the conversation history.
+
+12. Click on the "Build" button to implement your changes.
+
+13. Open the chat interface to test your modifications.
+
+14. Try asking something like, "How do I upload examples?"
+
+15. Click on "Display Prompt" to view your template.
+
+16. Now, you can see what the model used as a basis to generate its response.
+
+By following these instructions, you have successfully loaded a document into a PromptTemplate variable, allowing for more enriched and context-aware chat responses. This customization enhances user interaction by integrating relevant document content into the chat flow.
diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx
new file mode 100644
index 000000000..4ec4a300d
--- /dev/null
+++ b/docs/docs/index.mdx
@@ -0,0 +1,18 @@
+# 🎉 Welcome to Langflow
+
+Langflow is an easy way to prototype [LangChain](https://github.com/hwchase17/langchain) flows. The drag-and-drop feature allows quick and effortless experimentation, while the built-in chat interface facilitates real-time interaction. It provides options to edit prompt parameters, create chains and agents, track thought processes, and export flows.
+
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+
+
))}
diff --git a/src/frontend/src/alerts/notice/index.tsx b/src/frontend/src/alerts/notice/index.tsx
index a61b29822..c77a4bcd6 100644
--- a/src/frontend/src/alerts/notice/index.tsx
+++ b/src/frontend/src/alerts/notice/index.tsx
@@ -1,8 +1,8 @@
import { Transition } from "@headlessui/react";
+import { Info } from "lucide-react";
import { useEffect, useState } from "react";
import { Link } from "react-router-dom";
import { NoticeAlertType } from "../../types/alerts";
-import { Info } from "lucide-react";
export default function NoticeAlert({
title,
@@ -36,22 +36,19 @@ export default function NoticeAlert({
setShow(false);
removeAlert(id);
}}
- className="rounded-md w-96 mt-6 shadow-xl bg-blue-50 dark:bg-blue-900 p-4"
+ className="mt-6 w-96 rounded-md bg-info-background p-4 shadow-xl"
>
-
+
-
{title}
-
+
{title}
+
{link !== "" ? (
Details
diff --git a/src/frontend/src/alerts/success/index.tsx b/src/frontend/src/alerts/success/index.tsx
index 120644506..60dba1e93 100644
--- a/src/frontend/src/alerts/success/index.tsx
+++ b/src/frontend/src/alerts/success/index.tsx
@@ -1,7 +1,7 @@
import { Transition } from "@headlessui/react";
+import { CheckCircle2 } from "lucide-react";
import { useEffect, useState } from "react";
import { SuccessAlertType } from "../../types/alerts";
-import { CheckCircle2 } from "lucide-react";
export default function SuccessAlert({
title,
@@ -34,19 +34,14 @@ export default function SuccessAlert({
setShow(false);
removeAlert(id);
}}
- className="rounded-md w-96 mt-6 shadow-xl bg-green-50 dark:bg-green-900 p-4"
+ className="success-alert"
>
-
+
-
- {title}
-
+
{title}
diff --git a/src/frontend/src/assets/male-technologist.png b/src/frontend/src/assets/male-technologist.png
new file mode 100644
index 000000000..3b5281237
Binary files /dev/null and b/src/frontend/src/assets/male-technologist.png differ
diff --git a/src/frontend/src/assets/robot.png b/src/frontend/src/assets/robot.png
new file mode 100644
index 000000000..81de3118d
Binary files /dev/null and b/src/frontend/src/assets/robot.png differ
diff --git a/src/frontend/src/components/AccordionComponent/index.tsx b/src/frontend/src/components/AccordionComponent/index.tsx
index 8990da245..3b0ddd596 100644
--- a/src/frontend/src/components/AccordionComponent/index.tsx
+++ b/src/frontend/src/components/AccordionComponent/index.tsx
@@ -1,16 +1,11 @@
-import { ReactElement, useContext, useEffect, useRef, useState } from "react";
-import {
- AccordionComponentType,
- ProgressBarType,
-} from "../../types/components";
-import { Progress } from "../../components/ui/progress";
-import { setInterval } from "timers/promises";
+import { useState } from "react";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "../../components/ui/accordion";
+import { AccordionComponentType } from "../../types/components";
export default function AccordionComponent({
trigger,
@@ -18,7 +13,7 @@ export default function AccordionComponent({
open = [],
}: AccordionComponentType) {
const [value, setValue] = useState(
- open.length == 0 ? "" : getOpenAccordion(),
+ open.length === 0 ? "" : getOpenAccordion()
);
function getOpenAccordion() {
diff --git a/src/frontend/src/components/CrashErrorComponent/index.tsx b/src/frontend/src/components/CrashErrorComponent/index.tsx
index adc8909a8..fd7d22c36 100644
--- a/src/frontend/src/components/CrashErrorComponent/index.tsx
+++ b/src/frontend/src/components/CrashErrorComponent/index.tsx
@@ -1,11 +1,11 @@
export default function CrashErrorComponent({ error, resetErrorBoundary }) {
return (
-
-
-
+
+
+
Oops! An unknown error has occurred.
-
+
Please click the 'Reset Application' button to restore the
application's state. If the error persists, please create an issue on
our GitHub page. We apologize for any inconvenience this may have
@@ -14,7 +14,7 @@ export default function CrashErrorComponent({ error, resetErrorBoundary }) {