Release 0.5.0 (#1014)

Commit 7eadd9c619 by Gabriel Luiz Freitas Almeida, 2023-10-05 15:25:12 -03:00 (committed via GitHub)
392 changed files with 27858 additions and 6657 deletions

1
.dockerignore Normal file
@@ -0,0 +1 @@
.venv/

@@ -45,3 +45,21 @@ LANGFLOW_OPEN_BROWSER=
# Values: true, false
# Example: LANGFLOW_REMOVE_API_KEYS=false
LANGFLOW_REMOVE_API_KEYS=
# Whether to use RedisCache or InMemoryCache
# Values: memory, redis
# Example: LANGFLOW_CACHE_TYPE=memory
# If you want to use redis then the following environment variables must be set:
# LANGFLOW_REDIS_HOST (default: localhost)
# LANGFLOW_REDIS_PORT (default: 6379)
# LANGFLOW_REDIS_DB (default: 0)
# LANGFLOW_REDIS_CACHE_EXPIRE (default: 3600)
LANGFLOW_CACHE_TYPE=
# Superuser username
# Example: LANGFLOW_SUPERUSER=admin
LANGFLOW_SUPERUSER=
# Superuser password
# Example: LANGFLOW_SUPERUSER_PASSWORD=123456
LANGFLOW_SUPERUSER_PASSWORD=

34
.gitattributes vendored Normal file
@@ -0,0 +1,34 @@
# Set the default behavior, in case people don't have core.autocrlf set.
* text eol=lf
# Explicitly declare text files you want to always be normalized and converted
# to native line endings on checkout.
*.c text
*.h text
*.py text
*.js text
*.jsx text
*.ts text
*.tsx text
*.md text
*.mdx text
*.yml text
*.yaml text
*.xml text
*.csv text
*.json text
*.sh text
*.Dockerfile text
Dockerfile text
# Declare files that will always have CRLF line endings on checkout.
*.sln text eol=crlf
# Denote all files that are truly binary and should not be modified.
*.png binary
*.jpg binary
*.ico binary
*.gif binary
*.mp4 binary
*.svg binary
*.csv binary

44
.github/workflows/ci.yml vendored Normal file
@@ -0,0 +1,44 @@
name: "Async API tests"
on:
push:
branches:
- dev
pull_request:
branches:
- dev
- main
jobs:
build-and-test:
runs-on: ubuntu-latest
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Set up Docker
run: docker --version && docker-compose --version
- name: "Create env file"
working-directory: ./deploy
run: |
echo "${{ secrets.ENV_FILE }}" > .env
- name: Build and start services
working-directory: ./deploy
run: docker compose up --exit-code-from tests tests result_backend broker celeryworker db --build
continue-on-error: true
- name: Stop services
run: docker compose down

@@ -14,9 +14,7 @@ env:
jobs:
if_release:
if: |
${{ github.event.pull_request.merged == true }}
&& ${{ contains(github.event.pull_request.labels.*.name, 'pre-release') }}
if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -40,6 +38,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
draft: false
generateReleaseNotes: true
prerelease: true
tag: v${{ steps.check-version.outputs.version }}
commit: main
- name: Publish to PyPI

@@ -45,11 +45,3 @@ jobs:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
run: |
poetry publish
- name: Trigger build and push on langchain-serve
uses: peter-evans/repository-dispatch@v2
with:
token: ${{ secrets.SERVE_GITHUB_TOKEN }}
repository: jina-ai/langchain-serve
event-type: langflow-push
client-payload: '{"push_token": "${{ secrets.LCSERVE_PUSH_TOKEN }}", "branch": "main"}'

@@ -1,17 +0,0 @@
name: Trigger build and push on langchain-serve
on:
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Trigger build and push on langchain-serve
uses: peter-evans/repository-dispatch@v2
with:
token: ${{ secrets.SERVE_GITHUB_TOKEN }}
repository: jina-ai/langchain-serve
event-type: langflow-push
client-payload: '{"push_token": "${{ secrets.LCSERVE_PUSH_TOKEN }}", "branch": "dev"}'

@@ -7,7 +7,7 @@ on:
branches: [dev]
env:
POETRY_VERSION: "1.4.0"
POETRY_VERSION: "1.5.0"
jobs:
build:
@@ -16,6 +16,8 @@ jobs:
matrix:
python-version:
- "3.10"
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
steps:
- uses: actions/checkout@v3
- name: Install poetry

2
.gitignore vendored
@@ -253,3 +253,5 @@ langflow.db
.docusaurus/
/tmp/*
src/backend/langflow/frontend/
.docker

10
.vscode/launch.json vendored
@@ -1,4 +1,5 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Debug Backend",
@@ -38,6 +39,15 @@
"request": "launch",
"url": "http://localhost:3000/",
"webRoot": "${workspaceRoot}/src/frontend"
},
{
"name": "Python: Debug Tests",
"type": "python",
"request": "launch",
"program": "${file}",
"purpose": ["debug-test"],
"console": "integratedTerminal",
"justMyCode": false
}
]
}

48
.vscode/tasks.json vendored Normal file
@@ -0,0 +1,48 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "Init",
"type": "shell",
"command": "make init"
},
// make backend
{
"label": "Backend",
"type": "shell",
"command": "make backend"
},
// make frontend
{
"label": "Frontend",
"type": "shell",
"command": "make frontend"
},
// make test
{
"label": "Test",
"type": "shell",
"command": "make tests"
},
// make lint
{
"label": "Lint",
"type": "shell",
"command": "make lint"
},
// make format
{
"label": "Format",
"type": "shell",
"command": "make format"
},
// make install
{
"label": "Install",
"type": "shell",
"command": "make install_backend && make install_frontend"
}
]
}

@@ -7,6 +7,11 @@ to contributions, whether it be in the form of a new feature, improved infra, or
To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow.
Please do not try to push directly to this repo unless you are a maintainer.
The branch structure is as follows:
- `main`: The stable version of Langflow
- `dev`: The development version of Langflow. This branch is used to test new features before they are merged into `main` and, as such, may be unstable.
## 🗺Contributing Guidelines
## 🚩GitHub Issues

@@ -19,6 +19,7 @@ coverage:
--cov-report term-missing:skip-covered
tests:
@make install_backend
poetry run pytest tests
format:
@@ -27,23 +28,40 @@ format:
cd src/frontend && npm run format
lint:
# skip .venv folder
poetry run mypy --exclude .venv ./src/backend/langflow
poetry run mypy src/backend/langflow
poetry run black . --check
poetry run ruff . --fix
install_frontend:
cd src/frontend && npm install
install_frontendc:
cd src/frontend && rm -rf node_modules package-lock.json && npm install
run_frontend:
cd src/frontend && npm start
run_cli:
poetry run langflow run --path src/frontend/build
run_cli_debug:
poetry run langflow run --path src/frontend/build --log-level debug
setup_devcontainer:
make init
make build_frontend
poetry run langflow --path src/frontend/build
frontend:
make install_frontend
make run_frontend
frontendc:
make install_frontendc
make run_frontend
install_backend:
poetry install
poetry install --extras deploy
backend:
make install_backend
@@ -52,7 +70,7 @@ backend:
build_and_run:
echo 'Removing dist folder'
rm -rf dist
make build && poetry run pip install dist/*.tar.gz && poetry run langflow
make build && poetry run pip install dist/*.tar.gz && poetry run langflow run
build_and_install:
echo 'Removing dist folder'
@@ -69,17 +87,6 @@ build:
poetry build --format sdist
rm -rf src/backend/langflow/frontend
lcserve_push:
make build_frontend
@version=$$(poetry version --short); \
lc-serve push --app langflow.lcserve:app --app-dir . \
--image-name langflow --image-tag $${version} --verbose --public
lcserve_deploy:
@:$(if $(uses),,$(error `uses` is not set. Please run `make uses=... lcserve_deploy`))
lc-serve deploy jcloud --app langflow.lcserve:app --app-dir . \
--uses $(uses) --config src/backend/langflow/jcloud.yml --verbose
dev:
make install_frontend
ifeq ($(build),1)

118
README.md
@@ -36,8 +36,6 @@
- [Environment Variables](#environment-variables)
- [Deployment](#deployment)
- [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
- [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
- [API Usage](#api-usage)
- [Deploy on Railway](#deploy-on-railway)
- [Deploy on Render](#deploy-on-render)
- [🎨 Creating Flows](#-creating-flows)
@@ -78,7 +76,7 @@ python -m langflow
or
```shell
langflow # or langflow --help
langflow run # or langflow --help
```
### HuggingFace Spaces
@@ -94,7 +92,7 @@ Langflow provides a command-line interface (CLI) for easy management and configu
You can run Langflow using the following command:
```shell
langflow [OPTIONS]
langflow run [OPTIONS]
```
Each option is detailed below:
@@ -110,7 +108,6 @@ Each option is detailed below:
- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`.
- `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`.
- `--cache`: Selects the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`.
- `--jcloud/--no-jcloud`: Toggles the option to deploy on Jina AI Cloud. The default is `no-jcloud`.
- `--dev/--no-dev`: Toggles the development mode. The default is `no-dev`.
- `--path`: Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable.
- `--open-browser/--no-open-browser`: Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`.
@@ -134,115 +131,6 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
## Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve)
Langflow integrates with langchain-serve to provide a one-command deployment to Jina AI Cloud.
Start by installing `langchain-serve` with
```bash
pip install langflow[deploy]
# or
pip install -U langchain-serve
```
Then, run:
```bash
langflow --jcloud
```
```text
🎉 Langflow server successfully deployed on Jina AI Cloud 🎉
🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://<your-app>.wolf.jina.ai/
📖 Read more about managing the server: https://github.com/jina-ai/langchain-serve
```
<details>
<summary>Show complete (example) output</summary>
```text
🚀 Deploying Langflow server on Jina AI Cloud
╭───────────────────────── 🎉 Flow is available! ──────────────────────────╮
│ │
│ ID langflow-e3dd8820ec │
│ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai │
│ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec │
│ │
╰──────────────────────────────────────────────────────────────────────────╯
╭──────────────┬──────────────────────────────────────────────────────────────────────────────╮
│ App ID │ langflow-e3dd8820ec │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ Phase │ Serving │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ Endpoint │ wss://langflow-e3dd8820ec.wolf.jina.ai │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ App logs │ dashboards.wolf.jina.ai │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ Swagger UI │ https://langflow-e3dd8820ec.wolf.jina.ai/docs │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ OpenAPI JSON │ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json │
╰──────────────┴──────────────────────────────────────────────────────────────────────────────╯
🎉 Langflow server successfully deployed on Jina AI Cloud 🎉
🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/
📖 Read more about managing the server: https://github.com/jina-ai/langchain-serve
```
</details>
#### API Usage
You can use Langflow directly on your browser, or use the API endpoints on Jina AI Cloud to interact with the server.
<details>
<summary>Show API usage (with python)</summary>
```python
import requests
BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict"
FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468"
# You can tweak the flow by adding a tweaks dictionary
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
TWEAKS = {
"ChatOpenAI-g4jEr": {},
"ConversationChain-UidfJ": {}
}
def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict:
"""
Run a flow with a given message and optional tweaks.
:param message: The message to send to the flow
:param flow_id: The ID of the flow to run
:param tweaks: Optional tweaks to customize the flow
:return: The JSON response from the flow
"""
api_url = f"{BASE_API_URL}/{flow_id}"
payload = {"message": message}
if tweaks:
payload["tweaks"] = tweaks
response = requests.post(api_url, json=payload)
return response.json()
# Setup any tweaks you want to apply to the flow
print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
```
```json
{
"result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!"
}
```
</details>
> Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository.
## Deploy on Railway
[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/JMXEWp?referralCode=MnPSdg)
@@ -275,6 +163,8 @@ flow("Hey, have you heard of Langflow?")
We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible.
---
Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions and showcase your projects! 🦾
<p>

97
base.Dockerfile Normal file
@@ -0,0 +1,97 @@
# syntax=docker/dockerfile:1
# Keep this syntax directive! It's used to enable Docker BuildKit
# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865
# but I try to keep it updated (see history)
################################
# PYTHON-BASE
# Sets up all our shared environment variables
################################
FROM python:3.10-slim as python-base
# python
ENV PYTHONUNBUFFERED=1 \
# prevents python creating .pyc files
PYTHONDONTWRITEBYTECODE=1 \
\
# pip
PIP_DISABLE_PIP_VERSION_CHECK=on \
PIP_DEFAULT_TIMEOUT=100 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
POETRY_VERSION=1.5.1 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root
# it gets named `.venv`
POETRY_VIRTUALENVS_IN_PROJECT=true \
# do not ask any interactive question
POETRY_NO_INTERACTION=1 \
\
# paths
# this is where our requirements + virtual environment will live
PYSETUP_PATH="/opt/pysetup" \
VENV_PATH="/opt/pysetup/.venv"
# prepend poetry and venv to path
ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
################################
# BUILDER-BASE
# Used to build deps + create our virtual environment
################################
FROM python-base as builder-base
RUN apt-get update \
&& apt-get install --no-install-recommends -y \
# deps for installing poetry
curl \
# deps for building python deps
build-essential
# install poetry - respects $POETRY_VERSION & $POETRY_HOME
# The --mount will mount the buildx cache directory to where
# Poetry and Pip store their cache so that they can re-use it
RUN --mount=type=cache,target=/root/.cache \
curl -sSL https://install.python-poetry.org | python3 -
# copy project requirement files here to ensure they will be cached.
WORKDIR $PYSETUP_PATH
COPY poetry.lock pyproject.toml ./
COPY ./src/backend/langflow/main.py ./src/backend/langflow/main.py
# Copy README.md to the build context
COPY README.md .
# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally
RUN --mount=type=cache,target=/root/.cache \
poetry install --without dev --extras deploy
################################
# DEVELOPMENT
# Image used during development / testing
################################
FROM python-base as development
WORKDIR $PYSETUP_PATH
# copy in our built poetry + venv
COPY --from=builder-base $POETRY_HOME $POETRY_HOME
COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH
# Copy just one file to avoid rebuilding the whole image
COPY ./src/backend/langflow/__init__.py ./src/backend/langflow/__init__.py
# quicker install as runtime deps are already installed
RUN --mount=type=cache,target=/root/.cache \
poetry install --with=dev --extras deploy
# copy in our app code
COPY ./src/backend ./src/backend
RUN --mount=type=cache,target=/root/.cache \
poetry install --with=dev --extras deploy
COPY ./tests ./tests

57
deploy/.env.example Normal file
@@ -0,0 +1,57 @@
DOMAIN=localhost
STACK_NAME=langflow-stack
ENVIRONMENT=development
TRAEFIK_PUBLIC_NETWORK=traefik-public
TRAEFIK_TAG=langflow-traefik
TRAEFIK_PUBLIC_TAG=traefik-public
# RabbitMQ configuration
RABBITMQ_DEFAULT_USER=langflow
RABBITMQ_DEFAULT_PASS=langflow
# Database configuration
DB_USER=langflow
DB_PASSWORD=langflow
DB_HOST=db
DB_PORT=5432
DB_NAME=langflow
# Logging configuration
LOG_LEVEL=debug
# DB configuration
POSTGRES_USER=langflow
POSTGRES_PASSWORD=langflow
POSTGRES_DB=langflow
POSTGRES_PORT=5432
# Flower configuration
LANGFLOW_CACHE_TYPE=redis
LANGFLOW_REDIS_HOST=result_backend
LANGFLOW_REDIS_PORT=6379
LANGFLOW_REDIS_DB=0
LANGFLOW_REDIS_EXPIRE=3600
LANGFLOW_REDIS_PASSWORD=
FLOWER_UNAUTHENTICATED_API=True
BROKER_URL=amqp://langflow:langflow@broker:5672
RESULT_BACKEND=redis://result_backend:6379/0
C_FORCE_ROOT="true"
# Frontend configuration
VITE_PROXY_TARGET=http://backend:7860/api/
BACKEND_URL=http://backend:7860
# PGAdmin configuration
PGADMIN_DEFAULT_EMAIL=admin@admin.com
PGADMIN_DEFAULT_PASSWORD=admin
# OpenAI configuration (for testing purposes)
OPENAI_API_KEY=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
# Superuser configuration
LANGFLOW_SUPERUSER=superuser
LANGFLOW_SUPERUSER_PASSWORD=superuser
# New user configuration
LANGFLOW_NEW_USER_IS_ACTIVE=False

1
deploy/.gitignore vendored Normal file
@@ -0,0 +1 @@
pgadmin

92
deploy/base.Dockerfile Normal file
@@ -0,0 +1,92 @@
# syntax=docker/dockerfile:1
# Keep this syntax directive! It's used to enable Docker BuildKit
# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865
# but I try to keep it updated (see history)
################################
# PYTHON-BASE
# Sets up all our shared environment variables
################################
FROM python:3.10-slim as python-base
# python
ENV PYTHONUNBUFFERED=1 \
# prevents python creating .pyc files
PYTHONDONTWRITEBYTECODE=1 \
\
# pip
PIP_DISABLE_PIP_VERSION_CHECK=on \
PIP_DEFAULT_TIMEOUT=100 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
POETRY_VERSION=1.5.1 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root
# it gets named `.venv`
POETRY_VIRTUALENVS_IN_PROJECT=true \
# do not ask any interactive question
POETRY_NO_INTERACTION=1 \
\
# paths
# this is where our requirements + virtual environment will live
PYSETUP_PATH="/opt/pysetup" \
VENV_PATH="/opt/pysetup/.venv"
# prepend poetry and venv to path
ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
################################
# BUILDER-BASE
# Used to build deps + create our virtual environment
################################
FROM python-base as builder-base
RUN apt-get update \
&& apt-get install --no-install-recommends -y \
# deps for installing poetry
curl \
# deps for building python deps
build-essential
# install poetry - respects $POETRY_VERSION & $POETRY_HOME
# The --mount will mount the buildx cache directory to where
# Poetry and Pip store their cache so that they can re-use it
RUN --mount=type=cache,target=/root/.cache \
curl -sSL https://install.python-poetry.org | python3 -
# copy project requirement files here to ensure they will be cached.
WORKDIR $PYSETUP_PATH
COPY ./poetry.lock ./pyproject.toml ./
# Copy README.md to the build context
COPY ./README.md ./
# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally
RUN --mount=type=cache,target=/root/.cache \
poetry install --without dev --extras deploy
################################
# DEVELOPMENT
# Image used during development / testing
################################
FROM python-base as development
WORKDIR $PYSETUP_PATH
# copy in our built poetry + venv
COPY --from=builder-base $POETRY_HOME $POETRY_HOME
COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH
# Copy just one file to avoid rebuilding the whole image
COPY ./src/backend/langflow/__init__.py ./src/backend/langflow/__init__.py
# quicker install as runtime deps are already installed
RUN --mount=type=cache,target=/root/.cache \
poetry install --with=dev --extras deploy
# copy in our app code
COPY ./src/backend ./src/backend
COPY ./tests ./tests

@@ -0,0 +1,67 @@
version: "3.8"
services:
proxy:
ports:
- "80:80"
- "8090:8080"
command:
# Enable Docker in Traefik, so that it reads labels from Docker services
- --providers.docker
# Add a constraint to only use services with the label for this stack
# from the env var TRAEFIK_TAG
- --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`)
# Do not expose all Docker services, only the ones explicitly exposed
- --providers.docker.exposedbydefault=false
# Disable Docker Swarm mode for local development
# - --providers.docker.swarmmode
# Enable the access log, with HTTP requests
- --accesslog
# Enable the Traefik log, for configurations and errors
- --log
# Enable the Dashboard and API
- --api
# Enable the Dashboard and API in insecure mode for local development
- --api.insecure=true
labels:
- traefik.enable=true
- traefik.http.routers.${STACK_NAME?Variable not set}-traefik-public-http.rule=Host(`${DOMAIN?Variable not set}`)
- traefik.http.services.${STACK_NAME?Variable not set}-traefik-public.loadbalancer.server.port=80
result_backend:
ports:
- "6379:6379"
pgadmin:
ports:
- "5050:5050"
flower:
ports:
- "5555:5555"
backend:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860
frontend:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`)
- traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80
celeryworker:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-celeryworker-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.services.${STACK_NAME?Variable not set}-celeryworker.loadbalancer.server.port=7860
networks:
traefik-public:
# For local dev, don't expect an external Traefik network
external: false

@@ -0,0 +1,277 @@
version: "3.8"
services:
proxy:
image: traefik:v3.0
env_file:
- .env
networks:
- ${TRAEFIK_PUBLIC_NETWORK?Variable not set}
- default
volumes:
- /var/run/docker.sock:/var/run/docker.sock
command:
# Enable Docker in Traefik, so that it reads labels from Docker services
- --providers.docker
# Add a constraint to only use services with the label for this stack
# from the env var TRAEFIK_TAG
- --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`)
# Do not expose all Docker services, only the ones explicitly exposed
- --providers.docker.exposedbydefault=false
# Enable the access log, with HTTP requests
- --accesslog
# Enable the Traefik log, for configurations and errors
- --log
# Enable the Dashboard and API
- --api
deploy:
placement:
constraints:
- node.role == manager
labels:
# Enable Traefik for this service, to make it available in the public network
- traefik.enable=true
# Use the traefik-public network (declared below)
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
# Use the custom label "traefik.constraint-label=traefik-public"
# This public Traefik will only use services with this label
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
# traefik-http set up only to use the middleware to redirect to https
- traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.scheme=https
- traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.permanent=true
# Handle host with and without "www" to redirect to only one of them
# Uses environment variable DOMAIN
# To disable www redirection remove the Host() you want to discard, here and
# below for HTTPS
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.entrypoints=http
# traefik-https the actual router using HTTPS
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.entrypoints=https
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls=true
# Use the "le" (Let's Encrypt) resolver created below
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls.certresolver=le
# Define the port inside of the Docker service to use
- traefik.http.services.${STACK_NAME?Variable not set}-proxy.loadbalancer.server.port=80
# Handle domain with and without "www" to redirect to only one
# To disable www redirection remove the next line
- traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.regex=^https?://(www.)?(${DOMAIN?Variable not set})/(.*)
# Redirect a domain with www to non-www
# To disable it remove the next line
- traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.replacement=https://${DOMAIN?Variable not set}/$${3}
# Redirect a domain without www to www
# To enable it remove the previous line and uncomment the next
# - traefik.http.middlewares.${STACK_NAME}-www-redirect.redirectregex.replacement=https://www.${DOMAIN}/$${3}
# Middleware to redirect www, to disable it remove the next line
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.middlewares=${STACK_NAME?Variable not set}-www-redirect
# Middleware to redirect www, and redirect HTTP to HTTPS
# to disable www redirection remove the section: ${STACK_NAME?Variable not set}-www-redirect,
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.middlewares=${STACK_NAME?Variable not set}-www-redirect,${STACK_NAME?Variable not set}-https-redirect
backend: &backend
image: "ogabrielluiz/langflow:latest"
build:
context: ../
dockerfile: base.Dockerfile
depends_on:
- db
- broker
- result_backend
env_file:
- .env
volumes:
- ../:/app
- ./startup-backend.sh:/startup-backend.sh # Ensure the paths match
command: /startup-backend.sh # Fixed the path
healthcheck:
test: "exit 0"
deploy:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860
db:
image: postgres:15.4
volumes:
- app-db-data:/var/lib/postgresql/data/pgdata
environment:
- PGDATA=/var/lib/postgresql/data/pgdata
deploy:
placement:
constraints:
- node.labels.app-db-data == true
healthcheck:
test: "exit 0"
env_file:
- .env
pgadmin:
image: dpage/pgadmin4
networks:
- ${TRAEFIK_PUBLIC_NETWORK?Variable not set}
- default
volumes:
- pgadmin-data:/var/lib/pgadmin
env_file:
- .env
deploy:
labels:
- traefik.enable=true
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.rule=Host(`pgadmin.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.entrypoints=http
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.middlewares=${STACK_NAME?Variable not set}-https-redirect
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.rule=Host(`pgadmin.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.entrypoints=https
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls=true
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls.certresolver=le
- traefik.http.services.${STACK_NAME?Variable not set}-pgadmin.loadbalancer.server.port=5050
result_backend:
image: redis:6.2.5
env_file:
- .env
# ports:
# - 6379:6379
healthcheck:
test: "exit 0"
celeryworker:
<<: *backend
env_file:
- .env
build:
context: ../
dockerfile: base.Dockerfile
command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h
healthcheck:
test: "exit 0"
deploy:
replicas: 1
flower:
<<: *backend
env_file:
- .env
networks:
- default
build:
context: ../
dockerfile: base.Dockerfile
environment:
- FLOWER_PORT=5555
command: /bin/sh -c "celery -A langflow.worker.celery_app --broker=${BROKER_URL?Variable not set} flower --port=5555"
deploy:
labels:
- traefik.enable=true
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.rule=Host(`flower.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.entrypoints=http
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.middlewares=${STACK_NAME?Variable not set}-https-redirect
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.rule=Host(`flower.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.entrypoints=https
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls=true
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls.certresolver=le
- traefik.http.services.${STACK_NAME?Variable not set}-flower.loadbalancer.server.port=5555
frontend:
image: "ogabrielluiz/langflow_frontend:latest"
env_file:
- .env
# user: your-non-root-user
build:
context: ../src/frontend
dockerfile: Dockerfile
args:
- BACKEND_URL=http://backend:7860
restart: on-failure
deploy:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`)
- traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80
broker:
# RabbitMQ management console
image: rabbitmq:3-management
environment:
- RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER:-admin}
- RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS:-admin}
volumes:
- rabbitmq_data:/etc/rabbitmq/
- rabbitmq_data:/var/lib/rabbitmq/
- rabbitmq_log:/var/log/rabbitmq/
ports:
- 5672:5672
- 15672:15672
prometheus:
image: prom/prometheus:v2.37.9
env_file:
- .env
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
command:
- "--config.file=/etc/prometheus/prometheus.yml"
# ports:
# - 9090:9090
healthcheck:
test: "exit 0"
deploy:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-prometheus-http.rule=PathPrefix(`/metrics`)
- traefik.http.services.${STACK_NAME?Variable not set}-prometheus.loadbalancer.server.port=9090
grafana:
image: grafana/grafana:8.2.6
env_file:
- .env
# ports:
# - 3000:3000
volumes:
- grafana_data:/var/lib/grafana
deploy:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-grafana-http.rule=PathPrefix(`/grafana`)
- traefik.http.services.${STACK_NAME?Variable not set}-grafana.loadbalancer.server.port=3000
tests:
extends:
file: docker-compose.yml
service: backend
env_file:
- .env
build:
context: ../
dockerfile: base.Dockerfile
command: pytest -vv
healthcheck:
test: "exit 0"
# override deploy labels to avoid conflicts with the backend service
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-tests-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.services.${STACK_NAME?Variable not set}-tests.loadbalancer.server.port=7861
volumes:
grafana_data:
app-db-data:
rabbitmq_data:
rabbitmq_log:
pgadmin-data:
networks:
traefik-public:
# Allow setting it to false for testing
external: false # ${TRAEFIK_PUBLIC_NETWORK_IS_EXTERNAL-true}

258
deploy/docker-compose.yml Normal file
@@ -0,0 +1,258 @@
version: "3.8"
services:
proxy:
image: traefik:v3.0
env_file:
- .env
networks:
- ${TRAEFIK_PUBLIC_NETWORK?Variable not set}
- default
volumes:
- /var/run/docker.sock:/var/run/docker.sock
command:
# Enable Docker in Traefik, so that it reads labels from Docker services
- --providers.docker
# Add a constraint to only use services with the label for this stack
# from the env var TRAEFIK_TAG
- --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`)
# Do not expose all Docker services, only the ones explicitly exposed
- --providers.docker.exposedbydefault=false
# Enable the access log, with HTTP requests
- --accesslog
# Enable the Traefik log, for configurations and errors
- --log
# Enable the Dashboard and API
- --api
deploy:
placement:
constraints:
- node.role == manager
labels:
# Enable Traefik for this service, to make it available in the public network
- traefik.enable=true
# Use the traefik-public network (declared below)
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
# Use the custom label "traefik.constraint-label=traefik-public"
# This public Traefik will only use services with this label
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
# traefik-http set up only to use the middleware to redirect to https
- traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.scheme=https
- traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.permanent=true
# Handle host with and without "www" to redirect to only one of them
# Uses environment variable DOMAIN
# To disable www redirection remove the Host() you want to discard, here and
# below for HTTPS
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.entrypoints=http
# traefik-https the actual router using HTTPS
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.entrypoints=https
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls=true
# Use the "le" (Let's Encrypt) resolver created below
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls.certresolver=le
# Define the port inside of the Docker service to use
- traefik.http.services.${STACK_NAME?Variable not set}-proxy.loadbalancer.server.port=80
# Handle domain with and without "www" to redirect to only one
# To disable www redirection remove the next line
- traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.regex=^https?://(www.)?(${DOMAIN?Variable not set})/(.*)
# Redirect a domain with www to non-www
# To disable it remove the next line
- traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.replacement=https://${DOMAIN?Variable not set}/$${3}
# Redirect a domain without www to www
# To enable it remove the previous line and uncomment the next
# - traefik.http.middlewares.${STACK_NAME}-www-redirect.redirectregex.replacement=https://www.${DOMAIN}/$${3}
# Middleware to redirect www, to disable it remove the next line
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.middlewares=${STACK_NAME?Variable not set}-www-redirect
# Middleware to redirect www, and redirect HTTP to HTTPS
# to disable www redirection remove the section: ${STACK_NAME?Variable not set}-www-redirect,
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.middlewares=${STACK_NAME?Variable not set}-www-redirect,${STACK_NAME?Variable not set}-https-redirect
backend: &backend
image: "ogabrielluiz/langflow:latest"
build:
context: ../
dockerfile: base.Dockerfile
depends_on:
- db
- broker
- result_backend
env_file:
- .env
volumes:
- ../:/app
- ./startup-backend.sh:/startup-backend.sh # Ensure the paths match
command: /startup-backend.sh # Fixed the path
healthcheck:
test: "exit 0"
deploy:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
- traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860
db:
image: postgres:15.4
volumes:
- app-db-data:/var/lib/postgresql/data/pgdata
environment:
- PGDATA=/var/lib/postgresql/data/pgdata
deploy:
placement:
constraints:
- node.labels.app-db-data == true
healthcheck:
test: "exit 0"
env_file:
- .env
pgadmin:
image: dpage/pgadmin4
networks:
- ${TRAEFIK_PUBLIC_NETWORK?Variable not set}
- default
volumes:
- pgadmin-data:/var/lib/pgadmin
env_file:
- .env
deploy:
labels:
- traefik.enable=true
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.rule=Host(`pgadmin.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.entrypoints=http
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.middlewares=${STACK_NAME?Variable not set}-https-redirect
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.rule=Host(`pgadmin.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.entrypoints=https
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls=true
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls.certresolver=le
- traefik.http.services.${STACK_NAME?Variable not set}-pgadmin.loadbalancer.server.port=5050
result_backend:
image: redis:6.2.5
env_file:
- .env
# ports:
# - 6379:6379
healthcheck:
test: "exit 0"
celeryworker:
<<: *backend
env_file:
- .env
build:
context: ../
dockerfile: base.Dockerfile
command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h
healthcheck:
test: "exit 0"
deploy:
replicas: 1
flower:
<<: *backend
env_file:
- .env
networks:
- default
build:
context: ../
dockerfile: base.Dockerfile
environment:
- FLOWER_PORT=5555
command: /bin/sh -c "celery -A langflow.worker.celery_app --broker=${BROKER_URL?Variable not set} flower --port=5555"
deploy:
labels:
- traefik.enable=true
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.rule=Host(`flower.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.entrypoints=http
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.middlewares=${STACK_NAME?Variable not set}-https-redirect
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.rule=Host(`flower.${DOMAIN?Variable not set}`)
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.entrypoints=https
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls=true
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls.certresolver=le
- traefik.http.services.${STACK_NAME?Variable not set}-flower.loadbalancer.server.port=5555
frontend:
image: "ogabrielluiz/langflow_frontend:latest"
env_file:
- .env
# user: your-non-root-user
build:
context: ../src/frontend
dockerfile: Dockerfile
args:
- BACKEND_URL=http://backend:7860
restart: on-failure
deploy:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`)
- traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80
broker:
# RabbitMQ management console
image: rabbitmq:3-management
environment:
- RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER:-admin}
- RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS:-admin}
volumes:
- rabbitmq_data:/etc/rabbitmq/
- rabbitmq_data:/var/lib/rabbitmq/
- rabbitmq_log:/var/log/rabbitmq/
ports:
- 5672:5672
- 15672:15672
prometheus:
image: prom/prometheus:v2.37.9
env_file:
- .env
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
command:
- "--config.file=/etc/prometheus/prometheus.yml"
# ports:
# - 9090:9090
healthcheck:
test: "exit 0"
deploy:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-prometheus-http.rule=PathPrefix(`/metrics`)
- traefik.http.services.${STACK_NAME?Variable not set}-prometheus.loadbalancer.server.port=9090
grafana:
image: grafana/grafana:8.2.6
env_file:
- .env
# ports:
# - 3000:3000
volumes:
- grafana_data:/var/lib/grafana
deploy:
labels:
- traefik.enable=true
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
- traefik.http.routers.${STACK_NAME?Variable not set}-grafana-http.rule=PathPrefix(`/grafana`)
- traefik.http.services.${STACK_NAME?Variable not set}-grafana.loadbalancer.server.port=3000
volumes:
grafana_data:
app-db-data:
rabbitmq_data:
rabbitmq_log:
pgadmin-data:
networks:
traefik-public:
# Allow setting it to false for testing
external: false # ${TRAEFIK_PUBLIC_NETWORK_IS_EXTERNAL-true}

11
deploy/prometheus.yml Normal file
@@ -0,0 +1,11 @@
global:
scrape_interval: 15s
evaluation_interval: 15s
scrape_configs:
- job_name: prometheus
static_configs:
- targets: ["prometheus:9090"]
- job_name: flower
static_configs:
- targets: ["flower:5555"]

17
deploy/startup-backend.sh Executable file
@@ -0,0 +1,17 @@
#!/bin/bash
export LANGFLOW_DATABASE_URL="postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}"
# Your command to start the backend
# If the ENVIRONMENT variable is set to "development", then start the backend in development mode
# else start the backend in production mode with gunicorn
if [ "$ENVIRONMENT" = "development" ]; then
echo "Starting backend in development mode"
exec python -m uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --log-level ${LOG_LEVEL:-info} --workers 2 --reload
else
echo "Starting backend in production mode"
exec langflow run --host 0.0.0.0 --port 7860 --log-level ${LOG_LEVEL:-info} --workers -1 --backend-only
fi

@@ -15,4 +15,4 @@ COPY ./ ./
# Install dependencies
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi
CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload", "log-level", "debug"]
CMD ["uvicorn", "--factory", "src.backend.langflow.main:create_app", "--host", "0.0.0.0", "--port", "7860", "--reload", "--log-level", "debug"]

@@ -1,28 +1,33 @@
version: '3.4'
services:
backend:
volumes:
- ./:/app
build:
context: ./
dockerfile: ./dev.Dockerfile
command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload"]
ports:
- 7860:7860
- 5678:5678
restart: on-failure
frontend:
build:
context: ./src/frontend
dockerfile: ./dev.Dockerfile
args:
- BACKEND_URL=http://backend:7860
ports:
- "3000:3000"
volumes:
- ./src/frontend/public:/home/node/app/public
- ./src/frontend/src:/home/node/app/src
- ./src/frontend/package.json:/home/node/app/package.json
restart: on-failure
version: "3.4"
services:
backend:
volumes:
- ./:/app
build:
context: ./
dockerfile: ./dev.Dockerfile
command:
[
"sh",
"-c",
"pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn --factory src.backend.langflow.main:create_app --host 0.0.0.0 --port 7860 --reload",
]
ports:
- 7860:7860
- 5678:5678
restart: on-failure
frontend:
build:
context: ./src/frontend
dockerfile: ./dev.Dockerfile
args:
- BACKEND_URL=http://backend:7860
ports:
- "3000:3000"
volumes:
- ./src/frontend/public:/home/node/app/public
- ./src/frontend/src:/home/node/app/src
- ./src/frontend/package.json:/home/node/app/package.json
restart: on-failure

@@ -1,4 +1,4 @@
version: '3'
version: "3"
services:
backend:
@@ -9,7 +9,7 @@ services:
- "7860:7860"
volumes:
- ./:/app
command: bash -c "uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload"
command: bash -c "uvicorn --factory src.backend.langflow.main:create_app --host 0.0.0.0 --port 7860 --reload"
frontend:
build:
@@ -22,7 +22,7 @@ services:
ports:
- "3000:3000"
volumes:
- ./src/frontend/public:/home/node/app/public
- ./src/frontend/src:/home/node/app/src
- ./src/frontend/package.json:/home/node/app/package.json
restart: on-failure
- ./src/frontend/public:/home/node/app/public
- ./src/frontend/src:/home/node/app/src
- ./src/frontend/package.json:/home/node/app/package.json
restart: on-failure

9
docker_example/README.md Normal file
@@ -0,0 +1,9 @@
# Running LangFlow with Docker
```sh
git clone git@github.com:logspace-ai/langflow.git
cd langflow/docker_example
docker compose up
```
The web UI will be accessible on port [7860](http://localhost:7860/)

@@ -6,5 +6,5 @@ services:
context: .
dockerfile: Dockerfile
ports:
- "5003:5003"
- "7860:7860"
command: langflow --host 0.0.0.0

@@ -33,6 +33,7 @@ The CustomComponent class serves as the foundation for creating custom component
| Supported Types |
| --------------------------------------------------------- |
| _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ |
| _`langflow.field_typing.NestedDict`_ |
| _`langchain.chains.base.Chain`_ |
| _`langchain.PromptTemplate`_ |
| _`langchain.llms.base.BaseLLM`_ |
@@ -44,6 +45,8 @@ The CustomComponent class serves as the foundation for creating custom component
| _`langchain.embeddings.base.Embeddings`_ |
| _`langchain.schema.BaseRetriever`_ |
The difference between _`dict`_ and _`langflow.field_typing.NestedDict`_ is that one adds a simple key-value pair field, while the other opens a more robust dictionary editor.
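To make the distinction concrete, here is a minimal sketch of a custom component that declares one field of each type. It assumes the import paths referenced in these docs (`CustomComponent` from `langflow` and `NestedDict` from `langflow.field_typing`); the component name and logic are illustrative only.

```python
from langflow import CustomComponent
from langflow.field_typing import NestedDict


class DictMergeComponent(CustomComponent):
    display_name = "Dict Merge (example)"
    description = "Illustrates a plain dict field next to a NestedDict field."

    def build(self, simple_pairs: dict, nested_config: NestedDict) -> dict:
        # `simple_pairs` renders as a flat key-value field in the UI,
        # while `nested_config` opens the richer dictionary editor.
        return {**simple_pairs, **nested_config}
```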
<Admonition type="info">
Unlike Langchain types, base Python types do not add a
[handle](../guidelines/components) to the field by default. To add handles,

@@ -73,3 +73,25 @@ Used to load [OpenAIs](https://openai.com/) embedding models.
- **request_timeout:** Used to specify the maximum amount of time, in milliseconds, to wait for a response from the OpenAI API when generating embeddings for a given text.
- **tiktoken_model_name:** Used to count the number of tokens in documents to constrain them to be under a certain limit. By default, when set to None, this will be the same as the embedding model name.
---
### VertexAIEmbeddings
Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) [Embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings).
:::info
Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP). It provides access, management, and development of applications and services through global data centers. To use Vertex AI PaLM, you need to have the [google-cloud-aiplatform](https://pypi.org/project/google-cloud-aiplatform/) Python package installed and credentials configured for your environment.
:::
- **credentials:** The default custom credentials (google.auth.credentials.Credentials) to use.
- **location:** The default location to use when making API calls; defaults to `us-central1`.
- **max_output_tokens:** Token limit that determines the maximum amount of text output from one prompt; defaults to `128`.
- **model_name:** The name of the Vertex AI large language model; defaults to `text-bison`.
- **project:** The default GCP project to use when making Vertex API calls.
- **request_parallelism:** The amount of parallelism allowed for requests issued to VertexAI models; defaults to `5`.
- **temperature:** Tunes the degree of randomness in text generations. Should be a non-negative value; defaults to `0`.
- **top_k:** How the model selects tokens for output: the next token is selected from the `top_k` most probable tokens; defaults to `40`.
- **top_p:** Tokens are selected from most probable to least until the sum of their probabilities reaches this value; defaults to `0.95`.
- **tuned_model_name:** The name of a tuned model. If provided, `model_name` is ignored.
- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to `True`, it will print out some internal states of the chain while it is being run, which can help debug and understand the chain's behavior. If set to `False`, it will suppress the verbose output; defaults to `False`.
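For reference, the sketch below shows how these settings might be passed to the `VertexAIEmbeddings` wrapper when using langchain directly, outside the Langflow UI. The project id is a placeholder, and constructor argument names can differ between langchain versions, so treat this as an assumption-laden example rather than a definitive recipe.

```python
from langchain.embeddings import VertexAIEmbeddings

# Assumes google-cloud-aiplatform is installed and GCP application-default
# credentials are configured; "my-gcp-project" is a placeholder project id.
embeddings = VertexAIEmbeddings(
    project="my-gcp-project",
    location="us-central1",      # default region noted above
    request_parallelism=5,       # default parallelism noted above
)

vector = embeddings.embed_query("What does Langflow do?")
print(len(vector))  # dimensionality of the returned embedding vector
```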

@@ -185,6 +185,28 @@ Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) large lang
Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP). It provides access, management, and development of applications and services through global data centers. To use Vertex AI PaLM, you need to have the [google-cloud-aiplatform](https://pypi.org/project/google-cloud-aiplatform/) Python package installed and credentials configured for your environment.
:::
- **credentials:** The default custom credentials (google.auth.credentials.Credentials) to use.
- **location:** The default location to use when making API calls; defaults to `us-central1`.
- **max_output_tokens:** Token limit that determines the maximum amount of text output from one prompt; defaults to `128`.
- **model_name:** The name of the Vertex AI large language model; defaults to `text-bison`.
- **project:** The default GCP project to use when making Vertex API calls.
- **request_parallelism:** The amount of parallelism allowed for requests issued to VertexAI models; defaults to `5`.
- **temperature:** Tunes the degree of randomness in text generations. Should be a non-negative value; defaults to `0`.
- **top_k:** How the model selects tokens for output: the next token is selected from the `top_k` most probable tokens; defaults to `40`.
- **top_p:** Tokens are selected from most probable to least until the sum of their probabilities reaches this value; defaults to `0.95`.
- **tuned_model_name:** The name of a tuned model. If provided, `model_name` is ignored.
- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to `True`, it will print out some internal states of the chain while it is being run, which can help debug and understand the chain's behavior. If set to `False`, it will suppress the verbose output; defaults to `False`.
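A short sketch of how these parameters might be used with langchain's Vertex AI wrappers directly follows; it assumes `google-cloud-aiplatform` is installed and GCP credentials are configured, and argument names may vary across langchain versions.

```python
from langchain.llms import VertexAI
from langchain.chat_models import ChatVertexAI
from langchain.schema import HumanMessage

# Plain LLM wrapper, using the defaults documented above.
llm = VertexAI(model_name="text-bison", temperature=0, max_output_tokens=128)
print(llm("Summarize what Langflow does in one sentence."))

# Chat-oriented wrapper (see the ChatVertexAI section below); it takes
# a list of messages instead of a single prompt string.
chat = ChatVertexAI(temperature=0)
print(chat([HumanMessage(content="Say hello from Vertex AI.")]).content)
```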
---
### ChatVertexAI
Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) large language models.
:::info
Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP). It provides access, management, and development of applications and services through global data centers. To use Vertex AI PaLM, you need to have the [google-cloud-aiplatform](https://pypi.org/project/google-cloud-aiplatform/) Python package installed and credentials configured for your environment.
:::
- **credentials:** The default custom credentials (google.auth.credentials.Credentials) to use.
- **location:** The default location to use when making API calls; defaults to `us-central1`.
- **max_output_tokens:** Token limit that determines the maximum amount of text output from one prompt; defaults to `128`.

@@ -1,101 +0,0 @@
# Deploy on Jina AI Cloud
Langflow integrates with langchain-serve to provide a one-command deployment to [Jina AI Cloud](https://github.com/jina-ai/langchain-serve).
Start by installing `langchain-serve` with
```bash
pip install -U langchain-serve
```
Then, run:
```bash
langflow --jcloud
```
```text
🎉 Langflow server successfully deployed on Jina AI Cloud 🎉
🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://<your-app>.wolf.jina.ai/
📖 Read more about managing the server: https://github.com/jina-ai/langchain-serve
```
**Complete (example) output:**
```text
🚀 Deploying Langflow server on Jina AI Cloud
╭───────────────────────── 🎉 Flow is available! ──────────────────────────╮
│ │
│ ID langflow-e3dd8820ec │
│ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai │
│ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec │
│ │
╰──────────────────────────────────────────────────────────────────────────╯
╭──────────────┬──────────────────────────────────────────────────────────────────────────────╮
│ App ID │ langflow-e3dd8820ec │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ Phase │ Serving │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ Endpoint │ wss://langflow-e3dd8820ec.wolf.jina.ai │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ App logs │ dashboards.wolf.jina.ai │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ Swagger UI │ https://langflow-e3dd8820ec.wolf.jina.ai/docs │
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
│ OpenAPI JSON │ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json │
╰──────────────┴──────────────────────────────────────────────────────────────────────────────╯
🎉 Langflow server successfully deployed on Jina AI Cloud 🎉
🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/
📖 Read more about managing the server: https://github.com/jina-ai/langchain-serve
```
## API Usage (with python)
You can use Langflow directly on your browser or the API endpoints on Jina AI Cloud to interact with the server.
```python
import requests
BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict"
FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468"
# You can tweak the flow by adding a tweaks dictionary
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
TWEAKS = {
"ChatOpenAI-g4jEr": {},
"ConversationChain-UidfJ": {}
}
def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict:
"""
Run a flow with a given message and optional tweaks.
:param message: The message to send to the flow
:param flow_id: The ID of the flow to run
:param tweaks: Optional tweaks to customize the flow
:return: The JSON response from the flow
"""
api_url = f"{BASE_API_URL}/{flow_id}"
payload = {"message": message}
if tweaks:
payload["tweaks"] = tweaks
response = requests.post(api_url, json=payload)
return response.json()
# Setup any tweaks you want to apply to the flow
print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
```
```json
{
"result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!"
}
```
:::info
Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository.
:::

View file

@ -0,0 +1,147 @@
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
# API Keys
## Introduction
Langflow offers an API key feature that lets users access their individual components and flows without going through the traditional login authentication. The API key is a user-specific token that can be included in the request header or as a query parameter to authenticate API calls. The following documentation outlines how to generate, use, and manage these API keys in Langflow.
## Generating an API Key
### Through Langflow UI
{/* add image img/api-key.png */}
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: useBaseUrl("img/api-key.png"),
}}
style={{ width: "50%", maxWidth: "600px", margin: "0 auto" }}
/>
1. Click on the "API Key" icon.
2. Click on "Create new secret key".
3. Give it an optional name.
4. Click on "Create secret key".
5. Copy the API key and store it in a secure location.
## Using the API Key
### Using the `x-api-key` Header
Include the `x-api-key` in the HTTP header when making API requests:
```bash
curl -X POST \
  http://localhost:3000/api/v1/process/<your_flow_id> \
  -H 'Content-Type: application/json' \
  -H 'x-api-key: <your api key>' \
  -d '{"inputs": {"text":""}, "tweaks": {}}'
```
With Python using `requests`:
```python
import requests
from typing import Optional
BASE_API_URL = "http://localhost:3001/api/v1/process"
FLOW_ID = "4441b773-0724-434e-9cee-19d995d8f2df"
# You can tweak the flow by adding a tweaks dictionary
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
TWEAKS = {}
def run_flow(inputs: dict,
flow_id: str,
tweaks: Optional[dict] = None,
apiKey: Optional[str] = None) -> dict:
"""
Run a flow with a given message and optional tweaks.
    :param inputs: The input values to send to the flow
:param flow_id: The ID of the flow to run
:param tweaks: Optional tweaks to customize the flow
:return: The JSON response from the flow
"""
api_url = f"{BASE_API_URL}/{flow_id}"
payload = {"inputs": inputs}
headers = {}
if tweaks:
payload["tweaks"] = tweaks
if apiKey:
headers = {"x-api-key": apiKey}
response = requests.post(api_url, json=payload, headers=headers)
return response.json()
# Setup any tweaks you want to apply to the flow
inputs = {"text":""}
api_key = "<your api key>"
print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key))
```
### Using the Query Parameter
Alternatively, you can include the API key as a query parameter in the URL:
```bash
curl -X POST \
  'http://localhost:3000/api/v1/process/<your_flow_id>?x-api-key=<your_api_key>' \
  -H 'Content-Type: application/json' \
  -d '{"inputs": {"text":""}, "tweaks": {}}'
```
Or with Python:
```python
import requests
from typing import Optional
BASE_API_URL = "http://localhost:3001/api/v1/process"
FLOW_ID = "4441b773-0724-434e-9cee-19d995d8f2df"
# You can tweak the flow by adding a tweaks dictionary
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
TWEAKS = {}
def run_flow(inputs: dict,
flow_id: str,
tweaks: Optional[dict] = None,
apiKey: Optional[str] = None) -> dict:
"""
Run a flow with a given message and optional tweaks.
    :param inputs: The input values to send to the flow
:param flow_id: The ID of the flow to run
:param tweaks: Optional tweaks to customize the flow
:return: The JSON response from the flow
"""
api_url = f"{BASE_API_URL}/{flow_id}"
payload = {"inputs": inputs}
headers = {}
if tweaks:
payload["tweaks"] = tweaks
if apiKey:
api_url += f"?x-api-key={apiKey}"
response = requests.post(api_url, json=payload, headers=headers)
return response.json()
# Setup any tweaks you want to apply to the flow
inputs = {"text":""}
api_key = "<your api key>"
print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key))
```
## Security Considerations
- **Visibility**: The API key won't be retrievable again through the UI for security reasons.
- **Scope**: The key only allows access to the flows and components of the specific user to whom it was issued.
## Revoking an API Key
To revoke an API key, simply delete it from the UI. This will immediately invalidate the key and prevent it from being used again.

View file

@ -0,0 +1,73 @@
import Admonition from "@theme/Admonition";
# Asynchronous Processing
## Introduction
Starting from version 0.5, Langflow introduces a new feature to its API: the _`sync`_ flag. This flag allows users to opt for asynchronous processing of their flows, freeing up resources and enabling better control over long-running tasks.
For now, this feature supports running tasks either in a Celery worker queue or in AnyIO task groups.
<Admonition type="warning" caption="Experimental Feature">
This is an experimental feature. The default behavior of the API is still
synchronous processing. The API may change in the future.
</Admonition>
## The _`sync`_ Flag
The _`sync`_ flag can be included in the payload of your POST request to the _`/api/v1/process/<your_flow_id>`_ endpoint.
When set to _`false`_, the API will initiate an asynchronous task instead of processing the flow synchronously.
### API Request with _`sync`_ flag
```bash
curl -X POST \
http://localhost:3000/api/v1/process/<your_flow_id> \
-H 'Content-Type: application/json' \
-H 'x-api-key: <your_api_key>' \
-d '{"inputs": {"text": ""}, "tweaks": {}, "sync": false}'
```
Response:
```json
{
"result": {
"output": "..."
},
"task": {
"id": "...",
"href": "api/v1/task/<task_id>"
},
"session_id": "...",
"backend": "..." // celery or anyio
}
```
## Checking Task Status
You can check the status of an asynchronous task by making a GET request to the `/task/{task_id}` endpoint.
```bash
curl -X GET \
http://localhost:3000/api/v1/task/<task_id> \
-H 'x-api-key: <your_api_key>'
```
### Response
The endpoint will return the current status of the task and, if completed, the result of the task. Possible statuses include:
- _`PENDING`_: The task is waiting for execution.
- _`SUCCESS`_: The task has completed successfully.
- _`FAILURE`_: The task has failed.
Example response for a completed task:
```json
{
"status": "SUCCESS",
"result": {
"output": "..."
}
}
```
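As an illustration, the minimal Python sketch below submits a flow with `sync` set to `false` and then polls the task endpoint until the task leaves the `PENDING` state. The base URL, flow ID, and API key are placeholders to replace with your own values.
```python
# Minimal sketch: submit a flow asynchronously and poll its task status.
# BASE_URL, FLOW_ID and API_KEY are placeholders for your own deployment.
import time

import requests

BASE_URL = "http://localhost:3000/api/v1"
FLOW_ID = "<your_flow_id>"
API_KEY = "<your_api_key>"
HEADERS = {"x-api-key": API_KEY}

# Kick off the flow without waiting for the result.
resp = requests.post(
    f"{BASE_URL}/process/{FLOW_ID}",
    json={"inputs": {"text": ""}, "tweaks": {}, "sync": False},
    headers=HEADERS,
)
task_id = resp.json()["task"]["id"]

# Poll until the task reports SUCCESS or FAILURE.
while True:
    status = requests.get(f"{BASE_URL}/task/{task_id}", headers=HEADERS).json()
    if status["status"] != "PENDING":
        break
    time.sleep(2)

print(status)
```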

View file

@ -387,7 +387,7 @@ Your structure should look something like this:
The recommended way to load custom components is to set the _`LANGFLOW_COMPONENTS_PATH`_ environment variable to the path of your custom components directory. Then, run the Langflow CLI as usual.
```bash
export LANGFLOW_COMPONENTS_PATH=/path/to/components
export LANGFLOW_COMPONENTS_PATH='["/path/to/components"]'
langflow
```

View file

@ -0,0 +1,128 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import ReactPlayer from "react-player";
import Admonition from "@theme/Admonition";
# Sign up and Sign in
## Introduction
The login functionality in Langflow serves to authenticate users and protect sensitive routes in the application. Starting from version 0.5, Langflow introduces an enhanced login mechanism that is governed by a few environment variables. This enables new security features.
## Environment Variables
The following environment variables are crucial in configuring the login settings:
- _`LANGFLOW_AUTO_LOGIN`_: Determines whether Langflow should automatically log users in. Default is `True`.
- _`LANGFLOW_SUPERUSER`_: The username of the superuser.
- _`LANGFLOW_SUPERUSER_PASSWORD`_: The password for the superuser.
- _`LANGFLOW_SECRET_KEY`_: A key used for encrypting the superuser's password.
- _`LANGFLOW_NEW_USER_IS_ACTIVE`_: Determines whether new users are automatically activated. Default is `False`.
All of these variables can be passed to the CLI command _`langflow run`_ through the _`--env-file`_ option. For example:
```bash
langflow run --env-file .env
```
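For instance, a `.env` file for a deployment that disables automatic login might look like the following; all values are illustrative and should be replaced with your own secrets.
```bash
# Example .env -- illustrative values only, replace with your own secrets
LANGFLOW_AUTO_LOGIN=False
LANGFLOW_SUPERUSER=admin
LANGFLOW_SUPERUSER_PASSWORD=securepassword
LANGFLOW_SECRET_KEY=randomly_generated_secure_key
LANGFLOW_NEW_USER_IS_ACTIVE=False
```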
<Admonition type="info">
It is critical not to expose these environment variables in your code
repository. Always set them securely in your deployment environment, for
example, using Docker secrets, Kubernetes ConfigMaps/Secrets, or dedicated
secure environment configuration systems like AWS Secrets Manager.
</Admonition>
### _`LANGFLOW_AUTO_LOGIN`_
By default, this variable is set to `True`. When enabled (`True`), Langflow operates as it did in versions prior to 0.5—automatic login without requiring explicit user authentication.
To disable automatic login and enforce user authentication:
```bash
export LANGFLOW_AUTO_LOGIN=False
```
### _`LANGFLOW_SUPERUSER`_ and _`LANGFLOW_SUPERUSER_PASSWORD`_
These environment variables are only relevant when `LANGFLOW_AUTO_LOGIN` is set to `False`. They specify the username and password for the superuser, which is essential for administrative tasks.
To create a superuser manually:
```bash
export LANGFLOW_SUPERUSER=admin
export LANGFLOW_SUPERUSER_PASSWORD=securepassword
```
You can also use the CLI command `langflow superuser` to set up a superuser interactively.
### _`LANGFLOW_SECRET_KEY`_
This environment variable holds a secret key used for encrypting the superuser's password. Make sure to set this to a secure, randomly generated string.
```bash
export LANGFLOW_SECRET_KEY=randomly_generated_secure_key
```
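If you need a quick way to produce such a value, one option (a minimal sketch using only Python's standard library) is:
```python
# Generate a random, URL-safe string suitable for LANGFLOW_SECRET_KEY
import secrets

print(secrets.token_urlsafe(32))
```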
### _`LANGFLOW_NEW_USER_IS_ACTIVE`_
By default, this variable is set to `False`. When enabled (`True`), new users are automatically activated and can log in without requiring explicit activation by the superuser.
## Command-Line Interface
Langflow provides a command-line utility for managing superusers:
```bash
langflow superuser
```
This command prompts you to enter the username and password for the superuser, unless they are already set using environment variables.
## Sign-up
With _`LANGFLOW_AUTO_LOGIN`_ set to _`False`_, Langflow requires users to sign up before they can log in. The sign-up page is the default landing page when a user visits Langflow for the first time.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: useBaseUrl("img/sign-up.png"),
}}
style={{ width: "50%", maxWidth: "600px", margin: "0 auto" }}
/>
## Profile settings
Users can change their profile settings by clicking on the profile icon in the top right corner of the application. This opens a dropdown menu with the following options:
- **Admin Page**: Opens the admin page, which is only accessible to the superuser.
- **Profile Settings**: Opens the profile settings page.
- **Sign Out**: Logs the user out.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: useBaseUrl("img/my-account.png"),
}}
style={{ width: "50%", maxWidth: "600px", margin: "0 auto" }}
/>
By clicking on **Profile Settings**, the user is taken to the profile settings page, where they can change their password and their profile picture.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: useBaseUrl("img/profile-settings.png"),
}}
style={{ maxWidth: "600px", margin: "0 auto" }}
/>
By clicking on **Admin Page**, the superuser is taken to the admin page, where they can manage users and groups.
<ZoomableImage
alt="Docusaurus themed image"
sources={{
light: useBaseUrl("img/admin-page.png"),
}}
style={{ maxWidth: "600px", margin: "0 auto" }}
/>

View file

@ -0,0 +1,44 @@
import Admonition from "@theme/Admonition";
# Async API
## Introduction
<Admonition type="info" caption="In development">
This implementation is still in development. Contributions are welcome!
</Admonition>
The Async API is an implementation of the Langflow API that uses [Celery](https://docs.celeryproject.org/en/stable/)
to run tasks asynchronously. It relies on a message broker to send and receive messages, a result backend to store results, and a cache to store task states and session data.
### Configuration
The folder _`./deploy`_ in the [GitHub repository](https://github.com/logspace-ai/langflow) contains a _`.env.example`_ file that can be used to configure a Langflow deployment.
The file contains the variables required to configure a Celery worker queue, a Redis cache and result backend, and a RabbitMQ message broker.
To set it up locally, copy the file to _`.env`_ and run the following command:
```bash
docker compose up -d
```
This will set up the following containers:
- Langflow API
- Celery worker
- RabbitMQ message broker
- Redis cache
- PostgreSQL database
- PGAdmin
- Flower
- Traefik
- Grafana
- Prometheus
### Testing
To run the tests for the Async API, use the following command:
```bash
docker compose -f docker-compose.with_tests.yml up --exit-code-from tests tests result_backend broker celeryworker db --build
```

View file

@ -0,0 +1,7 @@
import ThemedImage from "@theme/ThemedImage";
import useBaseUrl from "@docusaurus/useBaseUrl";
import ZoomableImage from "/src/theme/ZoomableImage.js";
import ReactPlayer from "react-player";
Now, we need to explain what permissions the superuser gets. Once logged in, they can activate new users,
edit them, and manage users and groups from the Admin Page.

View file

@ -28,7 +28,7 @@
"medium-zoom": "^1.0.8",
"node-fetch": "^3.3.1",
"path-browserify": "^1.0.1",
"postcss": "^8.4.24",
"postcss": "^8.4.31",
"prism-react-renderer": "^1.3.5",
"react": "^17.0.2",
"react-dom": "^17.0.2",
@ -13956,9 +13956,9 @@
}
},
"node_modules/postcss": {
"version": "8.4.25",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.25.tgz",
"integrity": "sha512-7taJ/8t2av0Z+sQEvNzCkpDynl0tX3uJMCODi6nT3PfASC7dYCWV9aQ+uiCf+KBD4SEFcu+GvJdGdwzQ6OSjCw==",
"version": "8.4.31",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz",
"integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==",
"funding": [
{
"type": "opencollective",

View file

@ -34,7 +34,7 @@
"medium-zoom": "^1.0.8",
"node-fetch": "^3.3.1",
"path-browserify": "^1.0.1",
"postcss": "^8.4.24",
"postcss": "^8.4.31",
"prism-react-renderer": "^1.3.5",
"react": "^17.0.2",
"react-dom": "^17.0.2",

View file

@ -16,6 +16,9 @@ module.exports = {
label: "Guidelines",
collapsed: false,
items: [
"guidelines/login",
"guidelines/api",
"guidelines/async-api",
"guidelines/components",
"guidelines/features",
"guidelines/collection",
@ -52,6 +55,7 @@ module.exports = {
label: "Step-by-Step Guides",
collapsed: false,
items: [
"guides/async-tasks",
"guides/loading_document",
"guides/chatprompttemplate_guide",
"guides/langfuse_integration",
@ -87,7 +91,7 @@ module.exports = {
type: "category",
label: "Deployment",
collapsed: false,
items: ["deployment/gcp-deployment", "deployment/jina-deployment"],
items: ["deployment/gcp-deployment"],
},
{
type: "category",

View file

@ -1,101 +1,101 @@
Index,Organization Id,Name,Website,Country,Description,Founded,Industry,Number of employees
1,FAB0d41d5b5d22c,Ferrell LLC,https://price.net/,Papua New Guinea,Horizontal empowering knowledgebase,1990,Plastics,3498
2,6A7EdDEA9FaDC52,"Mckinney, Riley and Day",http://www.hall-buchanan.info/,Finland,User-centric system-worthy leverage,2015,Glass / Ceramics / Concrete,4952
3,0bFED1ADAE4bcC1,Hester Ltd,http://sullivan-reed.com/,China,Switchable scalable moratorium,1971,Public Safety,5287
4,2bFC1Be8a4ce42f,Holder-Sellers,https://becker.com/,Turkmenistan,De-engineered systemic artificial intelligence,2004,Automotive,921
5,9eE8A6a4Eb96C24,Mayer Group,http://www.brewer.com/,Mauritius,Synchronized needs-based challenge,1991,Transportation,7870
6,cC757116fe1C085,Henry-Thompson,http://morse.net/,Bahamas,Face-to-face well-modulated customer loyalty,1992,Primary / Secondary Education,4914
7,219233e8aFF1BC3,Hansen-Everett,https://www.kidd.org/,Pakistan,Seamless disintermediate collaboration,2018,Publishing Industry,7832
8,ccc93DCF81a31CD,Mcintosh-Mora,https://www.brooks.com/,Heard Island and McDonald Islands,Centralized attitude-oriented capability,1970,Import / Export,4389
9,0B4F93aA06ED03e,Carr Inc,http://ross.com/,Kuwait,Distributed impactful customer loyalty,1996,Plastics,8167
10,738b5aDe6B1C6A5,Gaines Inc,http://sandoval-hooper.com/,Uzbekistan,Multi-lateral scalable protocol,1997,Outsourcing / Offshoring,9698
11,AE61b8Ffebbc476,Kidd Group,http://www.lyons.com/,Bouvet Island (Bouvetoya),Proactive foreground paradigm,2001,Primary / Secondary Education,7473
12,eb3B7D06cCdD609,Crane-Clarke,https://www.sandoval.com/,Denmark,Front-line clear-thinking encryption,2014,Food / Beverages,9011
13,8D0c29189C9798B,"Keller, Campos and Black",https://www.garner.info/,Liberia,Ameliorated directional emulation,2020,Museums / Institutions,2862
14,D2c91cc03CA394c,Glover-Pope,http://www.silva.biz/,United Arab Emirates,Persevering contextually-based approach,2013,Medical Practice,9079
15,C8AC1eaf9C036F4,Pacheco-Spears,https://aguilar.com/,Sweden,Secured logistical synergy,1984,Maritime,769
16,b5D10A14f7a8AfE,Hodge-Ayers,http://www.archer-elliott.com/,Honduras,Future-proofed radical implementation,1990,Facilities Services,8508
17,68139b5C4De03B4,"Bowers, Guerra and Krause",http://www.carrillo-nicholson.com/,Uganda,De-engineered transitional strategy,1972,Primary / Secondary Education,6986
18,5c2EffEfdba2BdF,Mckenzie-Melton,http://montoya-thompson.com/,Hong Kong,Reverse-engineered heuristic alliance,1998,Investment Management / Hedge Fund / Private Equity,4589
19,ba179F19F7925f5,Branch-Mann,http://www.lozano.com/,Botswana,Adaptive intangible frame,1999,Architecture / Planning,7961
20,c1Ce9B350BAc66b,Weiss and Sons,https://barrett.com/,Korea,Sharable optimal functionalities,2011,Plastics,5984
21,8de40AC4e6EaCa4,"Velez, Payne and Coffey",http://burton.com/,Luxembourg,Mandatory coherent synergy,1986,Wholesale,5010
22,Aad86a4F0385F2d,Harrell LLC,http://www.frey-rosario.com/,Guadeloupe,Reverse-engineered mission-critical moratorium,2018,Construction,2185
23,22aC3FFd64fD703,"Eaton, Reynolds and Vargas",http://www.freeman.biz/,Monaco,Self-enabling multi-tasking process improvement,2014,Luxury Goods / Jewelry,8987
24,5Ec4C272bCf085c,Robbins-Cummings,http://donaldson-wilkins.com/,Belgium,Organic non-volatile hierarchy,1991,Pharmaceuticals,5038
25,5fDBeA8BB91a000,Jenkins Inc,http://www.kirk.biz/,South Africa,Front-line systematic help-desk,2002,Insurance,1215
26,dFfD6a6F9AC2d9C,"Greene, Benjamin and Novak",http://www.kent.net/,Romania,Centralized leadingedge moratorium,2012,Museums / Institutions,4941
27,4B217cC5a0674C5,"Dickson, Richmond and Clay",http://everett.com/,Czech Republic,Team-oriented tangible complexity,1980,Real Estate / Mortgage,3122
28,88b1f1cDcf59a37,Prince-David,http://thompson.com/,Christmas Island,Virtual holistic methodology,1970,Banking / Mortgage,1046
29,f9F7bBCAEeC360F,Ayala LLC,http://www.zhang.com/,Philippines,Open-source zero administration hierarchy,2021,Legal Services,7664
30,7Cb3AeFcE4Ba31e,Rivas Group,https://hebert.org/,Australia,Open-architected well-modulated capacity,1998,Logistics / Procurement,4155
31,ccBcC32adcbc530,"Sloan, Mays and Whitehead",http://lawson.com/,Chad,Face-to-face high-level conglomeration,1997,Civil Engineering,365
32,f5afd686b3d05F5,"Durham, Allen and Barnes",http://chan-stafford.org/,Zimbabwe,Synergistic web-enabled framework,1993,Mechanical or Industrial Engineering,6135
33,38C6cfC5074Fa5e,Fritz-Franklin,http://www.lambert.com/,Nepal,Automated 4thgeneration website,1972,Hospitality,4516
34,5Cd7efccCcba38f,Burch-Ewing,http://cline.net/,Taiwan,User-centric 4thgeneration system engine,1981,Venture Capital / VC,7443
35,9E6Acb51e3F9d6F,"Glass, Barrera and Turner",https://dunlap.com/,Kyrgyz Republic,Multi-channeled 3rdgeneration open system,2020,Utilities,2610
36,4D4d7E18321eaeC,Pineda-Cox,http://aguilar.org/,Bolivia,Fundamental asynchronous capability,2010,Human Resources / HR,1312
37,485f5d06B938F2b,"Baker, Mccann and Macdonald",http://www.anderson-barker.com/,Kenya,Cross-group user-facing focus group,2013,Legislative Office,1638
38,19E3a5Bf6dBDc4F,Cuevas-Moss,https://dodson-castaneda.net/,Guatemala,Extended human-resource intranet,1994,Music,9995
39,6883A965c7b68F7,Hahn PLC,http://newman.com/,Belarus,Organic logistical leverage,2012,Electrical / Electronic Manufacturing,3715
40,AC5B7AA74Aa4A2E,"Valentine, Ferguson and Kramer",http://stuart.net/,Jersey,Centralized secondary time-frame,1997,Non - Profit / Volunteering,3585
41,decab0D5027CA6a,Arroyo Inc,https://www.turner.com/,Grenada,Managed demand-driven website,2006,Writing / Editing,9067
42,dF084FbBb613eea,Walls LLC,http://www.reese-vasquez.biz/,Cape Verde,Self-enabling fresh-thinking installation,1989,Investment Management / Hedge Fund / Private Equity,1678
43,A2D89Ab9bCcAd4e,"Mitchell, Warren and Schneider",https://fox.biz/,Trinidad and Tobago,Enhanced intangible time-frame,2021,Capital Markets / Hedge Fund / Private Equity,3816
44,77aDc905434a49f,Prince PLC,https://www.watts.com/,Sweden,Profit-focused coherent installation,2016,Individual / Family Services,7645
45,235fdEFE2cfDa5F,Brock-Blackwell,http://www.small.com/,Benin,Secured foreground emulation,1986,Online Publishing,7034
46,1eD64cFe986BBbE,Walton-Barnett,https://ashley-schaefer.com/,Western Sahara,Right-sized clear-thinking flexibility,2001,Luxury Goods / Jewelry,1746
47,CbBbFcdd0eaE2cF,Bartlett-Arroyo,https://cruz.com/,Northern Mariana Islands,Realigned didactic function,1976,Civic / Social Organization,3987
48,49aECbDaE6aBD53,"Wallace, Madden and Morris",http://www.blevins-fernandez.biz/,Germany,Persistent real-time customer loyalty,2016,Pharmaceuticals,9443
49,7b3fe6e7E72bFa4,Berg-Sparks,https://cisneros-love.com/,Canada,Stand-alone static implementation,1974,Arts / Crafts,2073
50,c6DedA82A8aef7E,Gonzales Ltd,http://bird.com/,Tonga,Managed human-resource policy,1988,Consumer Goods,9069
51,7D9FBF85cdC3871,Lawson and Sons,https://www.wong.com/,French Southern Territories,Compatible analyzing intranet,2021,Arts / Crafts,3527
52,7dd18Fb7cB07b65,"Mcguire, Mcconnell and Olsen",https://melton-briggs.com/,Korea,Profound client-server frame,1988,Printing,8445
53,EF5B55FadccB8Fe,Charles-Phillips,https://bowman.com/,Cote d'Ivoire,Monitored client-server implementation,2012,Mental Health Care,3450
54,f8D4B99e11fAF5D,Odom Ltd,https://www.humphrey-hess.com/,Cote d'Ivoire,Advanced static process improvement,2012,Management Consulting,1825
55,e24D21BFd3bF1E5,Richard PLC,https://holden-coleman.net/,Mayotte,Object-based optimizing model,1971,Broadcast Media,4942
56,B9BdfEB6D3Ca44E,Sampson Ltd,https://blevins.com/,Cayman Islands,Intuitive local adapter,2005,Farming,1418
57,2a74D6f3D3B268e,"Cherry, Le and Callahan",https://waller-delacruz.biz/,Nigeria,Universal human-resource collaboration,2017,Entertainment / Movie Production,7202
58,Bf3F3f62c8aBC33,Cherry PLC,https://www.avila.info/,Marshall Islands,Persistent tertiary website,1980,Plastics,8245
59,aeBe26B80a7a23c,Melton-Nichols,https://kennedy.com/,Palau,User-friendly clear-thinking productivity,2021,Legislative Office,8741
60,aAeb29ad43886C6,Potter-Walsh,http://thomas-french.org/,Turkey,Optional non-volatile open system,2008,Human Resources / HR,6923
61,bD1bc6bB6d1FeD3,Freeman-Chen,https://mathis.com/,Timor-Leste,Phased next generation adapter,1973,International Trade / Development,346
62,EB9f456e8b7022a,Soto Group,https://norris.info/,Vietnam,Enterprise-wide executive installation,1988,Business Supplies / Equipment,9097
63,Dfef38C51D8DAe3,"Poole, Cruz and Whitney",https://reed.info/,Reunion,Balanced analyzing groupware,1978,Marketing / Advertising / Sales,2992
64,055ffEfB2Dd95B0,Riley Ltd,http://wiley.com/,Brazil,Optional exuding superstructure,1986,Textiles,9315
65,cBfe4dbAE1699da,"Erickson, Andrews and Bailey",https://www.hobbs-grant.com/,Eritrea,Vision-oriented secondary project,2014,Consumer Electronics,7829
66,fdFbecbadcdCdf1,"Wilkinson, Charles and Arroyo",http://hunter-mcfarland.com/,United States Virgin Islands,Assimilated 24/7 archive,1996,Building Materials,602
67,5DCb8A5a5ca03c0,Floyd Ltd,http://www.whitney.com/,Falkland Islands (Malvinas),Function-based fault-tolerant concept,2017,Public Relations / PR,2911
68,ce57DCbcFD6d618,Newman-Galloway,https://www.scott.com/,Luxembourg,Enhanced foreground collaboration,1987,Information Technology / IT,3934
69,5aaD187dc929371,Frazier-Butler,https://www.daugherty-farley.info/,Northern Mariana Islands,Persistent interactive circuit,1972,Outsourcing / Offshoring,5130
70,902D7Ac8b6d476b,Newton Inc,https://www.richmond-manning.info/,Netherlands Antilles,Fundamental stable info-mediaries,1976,Military Industry,563
71,32BB9Ff4d939788,Duffy-Levy,https://www.potter.com/,Guernsey,Diverse exuding installation,1982,Wireless,6146
72,adcB0afbE58bAe3,Wagner LLC,https://decker-esparza.com/,Uruguay,Reactive attitude-oriented toolset,1987,International Affairs,6874
73,dfcA1c84AdB61Ac,Mccall-Holmes,http://www.dean.com/,Benin,Object-based value-added database,2009,Legal Services,696
74,208044AC2fe52F3,Massey LLC,https://frazier.biz/,Suriname,Configurable zero administration Graphical User Interface,1986,Accounting,5004
75,f3C365f0c1A0623,Hicks LLC,http://alvarez.biz/,Pakistan,Quality-focused client-server Graphical User Interface,1970,Computer Software / Engineering,8480
76,ec5Bdd3CBAfaB93,"Cole, Russell and Avery",http://www.blankenship.com/,Mongolia,De-engineered fault-tolerant challenge,2000,Law Enforcement,7012
77,DDB19Be7eeB56B4,Cummings-Rojas,https://simon-pearson.com/,Svalbard & Jan Mayen Islands,User-centric modular customer loyalty,2012,Financial Services,7529
78,dd6CA3d0bc3cAfc,"Beasley, Greene and Mahoney",http://www.petersen-lawrence.com/,Togo,Extended content-based methodology,1976,Religious Institutions,869
79,A0B9d56e61070e3,"Beasley, Sims and Allison",http://burke.info/,Latvia,Secured zero tolerance hub,1972,Facilities Services,6182
80,cBa7EFe5D05Adaf,Crawford-Rivera,https://black-ramirez.org/,Cuba,Persevering exuding budgetary management,1999,Online Publishing,7805
81,Ea3f6D52Ec73563,Montes-Hensley,https://krueger.org/,Liechtenstein,Multi-tiered secondary productivity,2009,Printing,8433
82,bC0CEd48A8000E0,Velazquez-Odom,https://stokes.com/,Djibouti,Streamlined 6thgeneration function,2002,Alternative Dispute Resolution,4044
83,c89b9b59BC4baa1,Eaton-Morales,https://www.reeves-graham.com/,Micronesia,Customer-focused explicit frame,1990,Capital Markets / Hedge Fund / Private Equity,7013
84,FEC51bce8421a7b,"Roberson, Pennington and Palmer",http://www.keith-fisher.com/,Cameroon,Adaptive bi-directional hierarchy,1993,Telecommunications,5571
85,e0E8e27eAc9CAd5,"George, Russo and Guerra",https://drake.com/,Sweden,Centralized non-volatile capability,1989,Military Industry,2880
86,B97a6CF9bf5983C,Davila Inc,https://mcconnell.info/,Cocos (Keeling) Islands,Profit-focused dedicated frame,2017,Consumer Electronics,2215
87,a0a6f9b3DbcBEb5,Mays-Preston,http://www.browning-key.com/,Mali,User-centric heuristic focus group,2006,Military Industry,5786
88,8cC1bDa330a5871,Pineda-Morton,https://www.carr.com/,United States Virgin Islands,Grass-roots methodical info-mediaries,1991,Printing,6168
89,ED889CB2FE9cbd3,Huang and Sons,https://www.bolton.com/,Eritrea,Re-contextualized dynamic hierarchy,1981,Semiconductors,7484
90,F4Dc1417BC6cb8f,Gilbert-Simon,https://www.bradford.biz/,Burundi,Grass-roots radical parallelism,1973,Newspapers / Journalism,1927
91,7ABc3c7ecA03B34,Sampson-Griffith,http://hendricks.org/,Benin,Multi-layered composite paradigm,1972,Textiles,3881
92,4e0719FBE38e0aB,Miles-Dominguez,http://www.turner.com/,Gibraltar,Organized empowering forecast,1996,Civic / Social Organization,897
93,dEbDAAeDfaed00A,Rowe and Sons,https://www.simpson.org/,El Salvador,Balanced multimedia knowledgebase,1978,Facilities Services,8172
94,61BDeCfeFD0cEF5,"Valenzuela, Holmes and Rowland",https://www.dorsey.net/,Taiwan,Persistent tertiary focus group,1999,Transportation,1483
95,4e91eD25f486110,"Best, Wade and Shepard",https://zimmerman.com/,Zimbabwe,Innovative background definition,1991,Gambling / Casinos,4873
96,0a0bfFbBbB8eC7c,Holmes Group,https://mcdowell.org/,Ethiopia,Right-sized zero tolerance focus group,1975,Photography,2988
97,BA6Cd9Dae2Efd62,Good Ltd,http://duffy.com/,Anguilla,Reverse-engineered composite moratorium,1971,Consumer Services,4292
98,E7df80C60Abd7f9,Clements-Espinoza,http://www.flowers.net/,Falkland Islands (Malvinas),Progressive modular hub,1991,Broadcast Media,236
99,AFc285dbE2fEd24,Mendez Inc,https://www.burke.net/,Kyrgyz Republic,User-friendly exuding migration,1993,Education Management,339
100,e9eB5A60Cef8354,Watkins-Kaiser,http://www.herring.com/,Togo,Synergistic background access,2009,Financial Services,2785


BIN docs/static/img/admin-page.png vendored Normal file (binary file not shown; 171 KiB)

BIN docs/static/img/api-key.png vendored Normal file (binary file not shown; 2.9 KiB)

Binary image modified (31 KiB before, 31 KiB after; file not shown)

BIN docs/static/img/my-account.png vendored Normal file (binary file not shown; 32 KiB)

Binary image modified (3.2 MiB before, 3.2 MiB after; file not shown)

Binary image modified (3.2 MiB before, 3.2 MiB after; file not shown)

BIN docs/static/img/profile-settings.png vendored Normal file (binary file not shown; 341 KiB)

BIN docs/static/img/sign-up.png vendored Normal file (binary file not shown; 67 KiB)

Seven additional binary files not shown.

Binary image modified (2 MiB before, 2 MiB after; file not shown)

3703 poetry.lock generated

File diff suppressed because it is too large.

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.4.21"
version = "0.5.0"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -19,24 +19,24 @@ readme = "README.md"
keywords = ["nlp", "langchain", "openai", "gpt", "gui"]
packages = [{ include = "langflow", from = "src/backend" }]
include = ["src/backend/langflow/*", "src/backend/langflow/**/*"]
documentation = "https://docs.langflow.org"
[tool.poetry.scripts]
langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
pandas = "^2.0.0"
python = ">=3.9,<3.11"
fastapi = "^0.100.0"
uvicorn = "^0.22.0"
fastapi = "^0.103.0"
uvicorn = "^0.23.0"
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^21.2.0"
langchain = "^0.0.271"
langchain = "^0.0.308"
openai = "^0.27.8"
chromadb = "^0.3.0"
pandas = "2.0.3"
chromadb = "^0.3.21"
huggingface-hub = { version = "^0.16.0", extras = ["inference"] }
rich = "^13.5.0"
llama-cpp-python = { version = "~0.1.0", optional = true }
@ -49,21 +49,19 @@ fake-useragent = "^1.2.1"
docstring-parser = "^0.15"
psycopg2-binary = "^2.9.6"
pyarrow = "^12.0.0"
tiktoken = "~0.4.0"
tiktoken = "~0.5.0"
wikipedia = "^1.4.0"
langchain-serve = { version = ">0.0.51", optional = true }
qdrant-client = "^1.4.0"
websockets = "^10.3"
weaviate-client = "^3.23.0"
jina = "3.15.2"
sentence-transformers = { version = "^2.2.2", optional = true }
ctransformers = { version = "^0.2.10", optional = true }
cohere = "^4.21.0"
cohere = "^4.27.0"
python-multipart = "^0.0.6"
sqlmodel = "^0.0.8"
faiss-cpu = "^1.7.4"
anthropic = "^0.3.0"
orjson = "3.9.3"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
@ -78,13 +76,24 @@ psycopg = "^3.1.9"
psycopg-binary = "^3.1.9"
fastavro = "^1.8.0"
langchain-experimental = "^0.0.8"
celery = { extras = ["redis"], version = "^5.3.1", optional = true }
redis = { version = "^4.6.0", optional = true }
flower = { version = "^2.0.0", optional = true }
alembic = "^1.12.0"
passlib = "^1.7.4"
bcrypt = "^4.0.1"
python-jose = "^3.3.0"
metaphor-python = "^0.1.11"
pywin32 = { version = "^306", markers = "sys_platform == 'win32'" }
loguru = "^0.7.1"
langfuse = "^1.0.13"
pillow = "^10.0.0"
metal-sdk = "^2.0.2"
metal-sdk = "^2.2.0"
markupsafe = "^2.1.3"
[tool.poetry.group.dev.dependencies]
types-redis = "^4.6.0.5"
black = "^23.1.0"
ipykernel = "^6.21.2"
mypy = "^1.1.1"
@ -98,11 +107,18 @@ pandas-stubs = "^2.0.0.230412"
types-pillow = "^9.5.0.2"
types-appdirs = "^1.4.3.5"
types-pyyaml = "^6.0.12.8"
types-python-jose = "^3.3.4.8"
types-passlib = "^1.7.7.13"
locust = "^2.16.1"
pytest-mock = "^3.11.1"
pytest-xdist = "^3.3.1"
types-pywin32 = "^306.0.0.4"
types-google-cloud-ndb = "^2.2.0.0"
pytest-sugar = "^0.9.7"
[tool.poetry.extras]
deploy = ["langchain-serve"]
deploy = ["langchain-serve", "celery", "redis", "flower"]
local = ["llama-cpp-python", "sentence-transformers", "ctransformers"]
all = ["deploy", "local"]
@ -114,6 +130,7 @@ testpaths = ["tests", "integration"]
console_output_style = "progress"
filterwarnings = ["ignore::DeprecationWarning"]
log_cli = true
markers = ["async_test"]
[tool.ruff]

View file

@ -11,4 +11,4 @@ RUN rm *.whl
EXPOSE 80
CMD [ "uvicorn", "--host", "0.0.0.0", "--port", "80", "langflow.backend.app:app" ]
CMD [ "uvicorn", "--host", "0.0.0.0", "--port", "7860", "--factory", "langflow.main:create_app" ]

View file

@ -1,5 +1,7 @@
from importlib import metadata
from langflow.cache import cache_manager
# Deactivate cache manager for now
# from langflow.services.cache import cache_service
from langflow.processing.process import load_flow_from_json
from langflow.interface.custom.custom_component import CustomComponent
@ -10,4 +12,4 @@ except metadata.PackageNotFoundError:
__version__ = ""
del metadata # optional, avoids polluting the results of dir(__package__)
__all__ = ["load_flow_from_json", "cache_manager", "CustomComponent"]
__all__ = ["load_flow_from_json", "cache_service", "CustomComponent"]

View file

@ -1,23 +1,67 @@
import platform
import socket
import sys
import time
import httpx
from langflow.utils.util import get_number_of_workers
from multiprocess import Process # type: ignore
import platform
import webbrowser
from pathlib import Path
from typing import Optional
import socket
from rich.panel import Panel
import httpx
import typer
from dotenv import load_dotenv
from langflow.main import setup_app
from langflow.services.database.utils import session_getter
from langflow.services.getters import get_db_service, get_settings_service
from langflow.services.utils import initialize_services, initialize_settings_service
from langflow.utils.logger import configure, logger
from multiprocess import Process, cpu_count # type: ignore
from rich import box
from rich import print as rprint
import typer
from langflow.main import setup_app
from langflow.settings import settings
from langflow.utils.logger import configure, logger
import webbrowser
from dotenv import load_dotenv
from rich.console import Console
from rich.panel import Panel
from rich.table import Table
app = typer.Typer()
console = Console()
app = typer.Typer(no_args_is_help=True)
def get_number_of_workers(workers=None):
if workers == -1 or workers is None:
workers = (cpu_count() * 2) + 1
logger.debug(f"Number of workers: {workers}")
return workers
def display_results(results):
"""
Display the results of the migration.
"""
for table_results in results:
table = Table(title=f"Migration {table_results.table_name}")
table.add_column("Name")
table.add_column("Type")
table.add_column("Status")
for result in table_results.results:
status = "Success" if result.success else "Failure"
color = "green" if result.success else "red"
table.add_row(result.name, result.type, f"[{color}]{status}[/{color}]")
console.print(table)
console.print() # Print a new line
def set_var_for_macos_issue():
# OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
# we need to set this var if we are running on macOS
# otherwise we get an error when running gunicorn
if platform.system() in ["Darwin"]:
import os
os.environ["OBJC_DISABLE_INITIALIZE_FORK_SAFETY"] = "YES"
logger.debug("Set OBJC_DISABLE_INITIALIZE_FORK_SAFETY to YES to avoid error")
def update_settings(
@ -30,74 +74,29 @@ def update_settings(
"""Update the settings from a config file."""
# Check for database_url in the environment variables
initialize_settings_service()
settings_service = get_settings_service()
if config:
logger.debug(f"Loading settings from {config}")
settings.update_from_yaml(config, dev=dev)
settings_service.settings.update_from_yaml(config, dev=dev)
if remove_api_keys:
logger.debug(f"Setting remove_api_keys to {remove_api_keys}")
settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
settings_service.settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
if cache:
logger.debug(f"Setting cache to {cache}")
settings.update_settings(CACHE=cache)
settings_service.settings.update_settings(CACHE=cache)
if components_path:
logger.debug(f"Adding component path {components_path}")
settings.update_settings(COMPONENTS_PATH=components_path)
def serve_on_jcloud():
"""
Deploy Langflow server on Jina AI Cloud
"""
import asyncio
from importlib.metadata import version as mod_version
import click
try:
from lcserve.__main__ import serve_on_jcloud # type: ignore
except ImportError:
click.secho(
"🚨 Please install langchain-serve to deploy Langflow server on Jina AI Cloud "
"using `pip install langchain-serve`",
fg="red",
)
return
app_name = "langflow.lcserve:app"
app_dir = str(Path(__file__).parent)
version = mod_version("langflow")
base_image = "jinaai+docker://deepankarm/langflow"
click.echo("🚀 Deploying Langflow server on Jina AI Cloud")
app_id = asyncio.run(
serve_on_jcloud(
fastapi_app_str=app_name,
app_dir=app_dir,
uses=f"{base_image}:{version}",
name="langflow",
)
)
click.secho(
"🎉 Langflow server successfully deployed on Jina AI Cloud 🎉", fg="green"
)
click.secho(
"🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): ",
nl=False,
fg="green",
)
click.secho(f"https://{app_id}.wolf.jina.ai/", fg="blue")
click.secho("📖 Read more about managing the server: ", nl=False, fg="green")
click.secho("https://github.com/jina-ai/langchain-serve", fg="blue")
settings_service.settings.update_settings(COMPONENTS_PATH=components_path)
@app.command()
def serve(
def run(
host: str = typer.Option(
"127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
),
workers: int = typer.Option(
2, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
),
timeout: int = typer.Option(300, help="Worker timeout in seconds."),
port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"),
@ -106,7 +105,9 @@ def serve(
help="Path to the directory containing custom components.",
envvar="LANGFLOW_COMPONENTS_PATH",
),
config: str = typer.Option("config.yaml", help="Path to the configuration file."),
config: str = typer.Option(
Path(__file__).parent / "config.yaml", help="Path to the configuration file."
),
# .env file param
env_file: Path = typer.Option(
None, help="Path to the .env file containing environment variables."
@ -122,7 +123,6 @@ def serve(
help="Type of cache to use. (InMemoryCache, SQLiteCache)",
default=None,
),
jcloud: bool = typer.Option(False, help="Deploy on Jina AI Cloud"),
dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"),
# This variable does not work when set here, but it is set by the .env file
# and handled by Pydantic
@ -146,17 +146,22 @@ def serve(
help="Remove API keys from the projects saved in the database.",
envvar="LANGFLOW_REMOVE_API_KEYS",
),
backend_only: bool = typer.Option(
False,
help="Run only the backend server without the frontend.",
envvar="LANGFLOW_BACKEND_ONLY",
),
):
"""
Run the Langflow server.
Run Langflow.
"""
set_var_for_macos_issue()
# override env variables with .env file
if env_file:
load_dotenv(env_file, override=True)
if jcloud:
return serve_on_jcloud()
configure(log_level=log_level, log_file=log_file)
update_settings(
config,
@ -167,7 +172,7 @@ def serve(
)
# create path object if path is provided
static_files_dir: Optional[Path] = Path(path) if path else None
app = setup_app(static_files_dir=static_files_dir)
app = setup_app(static_files_dir=static_files_dir, backend_only=backend_only)
# check if port is being used
if is_port_in_use(port, host):
port = get_free_port(port)
@ -175,10 +180,13 @@ def serve(
options = {
"bind": f"{host}:{port}",
"workers": get_number_of_workers(workers),
"worker_class": "uvicorn.workers.UvicornWorker",
"timeout": timeout,
}
# If we are running under pytest, skip starting the server
if "pytest" in sys.modules:
return
if platform.system() in ["Windows"]:
# Run using uvicorn on Windows
# because Windows doesn't support gunicorn
@ -299,6 +307,53 @@ def run_langflow(host, port, log_level, options, app):
sys.exit(1)
@app.command()
def superuser(
username: str = typer.Option(..., prompt=True, help="Username for the superuser."),
password: str = typer.Option(
..., prompt=True, hide_input=True, help="Password for the superuser."
),
log_level: str = typer.Option(
"critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
),
):
"""
Create a superuser.
"""
configure(log_level=log_level)
initialize_services()
db_service = get_db_service()
with session_getter(db_service) as session:
from langflow.services.auth.utils import create_super_user
if create_super_user(db=session, username=username, password=password):
# Verify that the superuser was created
from langflow.services.database.models.user.user import User
user: User = session.query(User).filter(User.username == username).first()
if user is None or not user.is_superuser:
typer.echo("Superuser creation failed.")
return
typer.echo("Superuser created successfully.")
else:
typer.echo("Superuser creation failed.")
@app.command()
def migration(test: bool = typer.Option(True, help="Run migrations in test mode.")):
"""
Run or test migrations.
"""
initialize_services()
db_service = get_db_service()
if not test:
db_service.run_migrations()
results = db_service.run_migrations_test()
display_results(results)
def main():
app()
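A quick reference for the renamed entry point and the two new commands, as a minimal sketch that drives them in-process with Typer's test runner; the flag names mirror the typer.Option definitions above, and the credential values are placeholders:

from typer.testing import CliRunner

from langflow.__main__ import app

runner = CliRunner()

# Create a superuser without the interactive prompts (placeholder credentials).
result = runner.invoke(app, ["superuser", "--username", "admin", "--password", "change-me"])
print(result.output)  # "Superuser created successfully." on success

# Check migrations; test mode is the default, so nothing is applied to the database.
result = runner.invoke(app, ["migration"])
print(result.output)

What was `langflow serve` is now `langflow run`, and the new --backend-only flag (or LANGFLOW_BACKEND_ONLY) starts the API without the frontend.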

View file

@ -0,0 +1,113 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# This is the path to the db in the root of the project.
# When the user runs Langflow, the database url will
# be set dynamically.
sqlalchemy.url = sqlite:///../../../langflow.db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View file

@ -0,0 +1 @@
Generic single-database configuration.

View file

@ -0,0 +1,81 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from langflow.services.database.manager import SQLModel
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = SQLModel.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
render_as_batch=True,
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata, render_as_batch=True
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

View file

@ -0,0 +1,27 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View file

@ -0,0 +1,177 @@
"""Adds tables
Revision ID: 260dbcc8b680
Revises:
Create Date: 2023-08-27 19:49:02.681355
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = "260dbcc8b680"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn)
# List existing tables
existing_tables = inspector.get_table_names()
# Drop 'flowstyle' table if it exists
# and other related indices
if "flowstyle" in existing_tables:
op.drop_table("flowstyle")
if "ix_flowstyle_flow_id" in [
index["name"] for index in inspector.get_indexes("flowstyle")
]:
op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle")
existing_indices_flow = []
existing_fks_flow = []
if "flow" in existing_tables:
existing_indices_flow = [
index["name"] for index in inspector.get_indexes("flow")
]
# Existing foreign keys for the 'flow' table, if it exists
existing_fks_flow = [
fk["referred_table"] + "." + fk["referred_columns"][0]
for fk in inspector.get_foreign_keys("flow")
]
# Now check if the column user_id exists in the 'flow' table
# If it does not exist, we need to create the foreign key
if "user" not in existing_tables:
op.create_table(
"user",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("username", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("password", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("is_active", sa.Boolean(), nullable=False),
sa.Column("is_superuser", sa.Boolean(), nullable=False),
sa.Column("create_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.Column("last_login_at", sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.create_index(
batch_op.f("ix_user_username"), ["username"], unique=True
)
if "apikey" not in existing_tables:
op.create_table(
"apikey",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("last_used_at", sa.DateTime(), nullable=True),
sa.Column("total_uses", sa.Integer(), nullable=False, default=0),
sa.Column("is_active", sa.Boolean(), nullable=False, default=True),
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["user.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.create_index(
batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True
)
batch_op.create_index(batch_op.f("ix_apikey_name"), ["name"], unique=False)
batch_op.create_index(
batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False
)
if "flow" not in existing_tables:
op.create_table(
"flow",
sa.Column("data", sa.JSON(), nullable=True),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["user.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)
# Conditionally create indices for 'flow' table
# if _alembic_tmp_flow exists, then we need to drop it first
# This is to deal with SQLite not being able to ROLLBACK
# for some unknown reason
if "_alembic_tmp_flow" in existing_tables:
op.drop_table("_alembic_tmp_flow")
with op.batch_alter_table("flow", schema=None) as batch_op:
flow_columns = [col["name"] for col in inspector.get_columns("flow")]
if "user_id" not in flow_columns:
batch_op.add_column(
sa.Column(
"user_id",
sqlmodel.sql.sqltypes.GUID(),
nullable=True, # This should be False, but we need to allow NULL values for now
)
)
if "user.id" not in existing_fks_flow:
batch_op.create_foreign_key("fk_flow_user_id", "user", ["user_id"], ["id"])
if "ix_flow_description" not in existing_indices_flow:
batch_op.create_index(
batch_op.f("ix_flow_description"), ["description"], unique=False
)
if "ix_flow_name" not in existing_indices_flow:
batch_op.create_index(batch_op.f("ix_flow_name"), ["name"], unique=False)
with op.batch_alter_table("flow", schema=None) as batch_op:
if "ix_flow_user_id" not in existing_indices_flow:
batch_op.create_index(
batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn)
# List existing tables
existing_tables = inspector.get_table_names()
if "flow" in existing_tables:
with op.batch_alter_table("flow", schema=None) as batch_op:
batch_op.drop_index(batch_op.f("ix_flow_user_id"))
batch_op.drop_index(batch_op.f("ix_flow_name"))
batch_op.drop_index(batch_op.f("ix_flow_description"))
op.drop_table("flow")
if "apikey" in existing_tables:
with op.batch_alter_table("apikey", schema=None) as batch_op:
batch_op.drop_index(batch_op.f("ix_apikey_user_id"))
batch_op.drop_index(batch_op.f("ix_apikey_name"))
batch_op.drop_index(batch_op.f("ix_apikey_api_key"))
op.drop_table("apikey")
if "user" in existing_tables:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.drop_index(batch_op.f("ix_user_username"))
op.drop_table("user")
if "flowstyle" in existing_tables:
op.drop_table("flowstyle")
if "component" in existing_tables:
op.drop_table("component")
# ### end Alembic commands ###

View file

@ -0,0 +1,49 @@
"""Add profile-image column
Revision ID: 67cc006d50bf
Revises: 260dbcc8b680
Create Date: 2023-09-08 07:36:13.387318
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = "67cc006d50bf"
down_revision: Union[str, None] = "260dbcc8b680"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn)
if "user" in inspector.get_table_names() and "profile_image" not in [
column["name"] for column in inspector.get_columns("user")
]:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.add_column(
sa.Column(
"profile_image", sqlmodel.sql.sqltypes.AutoString(), nullable=True
)
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
inspector = Inspector.from_engine(conn)
if "user" in inspector.get_table_names() and "profile_image" in [
column["name"] for column in inspector.get_columns("user")
]:
with op.batch_alter_table("user", schema=None) as batch_op:
batch_op.drop_column("profile_image")
# ### end Alembic commands ###

View file

@ -0,0 +1,79 @@
"""Change columns to be nullable
Revision ID: eb5866d51fd2
Revises: 67cc006d50bf
Create Date: 2023-10-04 10:18:25.640458
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
import sqlmodel # noqa: F401
# revision identifiers, used by Alembic.
revision: str = "eb5866d51fd2"
down_revision: Union[str, None] = "67cc006d50bf"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
op.drop_table("flowstyle")
with op.batch_alter_table("component", schema=None) as batch_op:
batch_op.drop_index("ix_component_frontend_node_id")
batch_op.drop_index("ix_component_name")
except Exception:
pass
try:
op.drop_table("component")
except Exception:
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
op.create_table(
"component",
sa.Column("id", sa.CHAR(length=32), nullable=False),
sa.Column("frontend_node_id", sa.CHAR(length=32), nullable=False),
sa.Column("name", sa.VARCHAR(), nullable=False),
sa.Column("description", sa.VARCHAR(), nullable=True),
sa.Column("python_code", sa.VARCHAR(), nullable=True),
sa.Column("return_type", sa.VARCHAR(), nullable=True),
sa.Column("is_disabled", sa.BOOLEAN(), nullable=False),
sa.Column("is_read_only", sa.BOOLEAN(), nullable=False),
sa.Column("create_at", sa.DATETIME(), nullable=False),
sa.Column("update_at", sa.DATETIME(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
with op.batch_alter_table("component", schema=None) as batch_op:
batch_op.create_index("ix_component_name", ["name"], unique=False)
batch_op.create_index(
"ix_component_frontend_node_id", ["frontend_node_id"], unique=False
)
except Exception:
pass
try:
op.create_table(
"flowstyle",
sa.Column("color", sa.VARCHAR(), nullable=False),
sa.Column("emoji", sa.VARCHAR(), nullable=False),
sa.Column("flow_id", sa.CHAR(length=32), nullable=True),
sa.Column("id", sa.CHAR(length=32), nullable=False),
sa.ForeignKeyConstraint(
["flow_id"],
["flow.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)
except Exception:
pass
# ### end Alembic commands ###

View file

@ -5,8 +5,10 @@ from langflow.api.v1 import (
endpoints_router,
validate_router,
flows_router,
flow_styles_router,
component_router,
users_router,
api_key_router,
login_router,
)
router = APIRouter(
@ -17,4 +19,6 @@ router.include_router(endpoints_router)
router.include_router(validate_router)
router.include_router(component_router)
router.include_router(flows_router)
router.include_router(flow_styles_router)
router.include_router(users_router)
router.include_router(api_key_router)
router.include_router(login_router)

View file

@ -59,33 +59,6 @@ def build_input_keys_response(langchain_object, artifacts):
return input_keys_response
def merge_nested_dicts(dict1, dict2):
for key, value in dict2.items():
if isinstance(value, dict) and isinstance(dict1.get(key), dict):
dict1[key] = merge_nested_dicts(dict1[key], value)
else:
dict1[key] = value
return dict1
def merge_nested_dicts_with_renaming(dict1, dict2):
for key, value in dict2.items():
if (
key in dict1
and isinstance(value, dict)
and isinstance(dict1.get(key), dict)
):
for sub_key, sub_value in value.items():
if sub_key in dict1[key]:
new_key = get_new_key(dict1[key], sub_key)
dict1[key][new_key] = sub_value
else:
dict1[key][sub_key] = sub_value
else:
dict1[key] = value
return dict1
def get_new_key(dictionary, original_key):
counter = 1
new_key = original_key + " (" + str(counter) + ")"

View file

@ -2,8 +2,10 @@ from langflow.api.v1.endpoints import router as endpoints_router
from langflow.api.v1.validate import router as validate_router
from langflow.api.v1.chat import router as chat_router
from langflow.api.v1.flows import router as flows_router
from langflow.api.v1.flow_styles import router as flow_styles_router
from langflow.api.v1.components import router as component_router
from langflow.api.v1.users import router as users_router
from langflow.api.v1.api_key import router as api_key_router
from langflow.api.v1.login import router as login_router
__all__ = [
"chat_router",
@ -11,5 +13,7 @@ __all__ = [
"component_router",
"validate_router",
"flows_router",
"flow_styles_router",
"users_router",
"api_key_router",
"login_router",
]

View file

@ -0,0 +1,61 @@
from uuid import UUID
from fastapi import APIRouter, HTTPException, Depends
from langflow.api.v1.schemas import ApiKeysResponse
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.api_key.api_key import (
ApiKeyCreate,
UnmaskedApiKeyRead,
)
# Assuming you have these methods in your service layer
from langflow.services.database.models.api_key.crud import (
get_api_keys,
create_api_key,
delete_api_key,
)
from langflow.services.database.models.user.user import User
from langflow.services.getters import get_session
from sqlmodel import Session
router = APIRouter(tags=["APIKey"], prefix="/api_key")
@router.get("/", response_model=ApiKeysResponse)
def get_api_keys_route(
db: Session = Depends(get_session),
current_user: User = Depends(get_current_active_user),
):
try:
user_id = current_user.id
keys = get_api_keys(db, user_id)
return ApiKeysResponse(total_count=len(keys), user_id=user_id, api_keys=keys)
except Exception as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
@router.post("/", response_model=UnmaskedApiKeyRead)
def create_api_key_route(
req: ApiKeyCreate,
current_user: User = Depends(get_current_active_user),
db: Session = Depends(get_session),
):
try:
user_id = current_user.id
return create_api_key(db, req, user_id=user_id)
except Exception as e:
raise HTTPException(status_code=400, detail=str(e)) from e
@router.delete("/{api_key_id}")
def delete_api_key_route(
api_key_id: UUID,
current_user=Depends(get_current_active_user),
db: Session = Depends(get_session),
):
try:
delete_api_key(db, api_key_id)
return {"detail": "API Key deleted"}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e)) from e
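A rough client-side sketch for the new API key routes, assuming a local server on port 7860, a bearer token obtained from the login router, and illustrative response field names (the authoritative shapes are ApiKeysResponse and UnmaskedApiKeyRead above):

import httpx

BASE = "http://127.0.0.1:7860/api/v1"  # assumed local server
headers = {"Authorization": "Bearer <access-token>"}  # token from the login router

with httpx.Client(base_url=BASE, headers=headers) as client:
    # Create a key; the unmasked value is only returned by this call.
    created = client.post("/api_key/", json={"name": "ci-key"}).json()
    # List the current user's keys.
    listing = client.get("/api_key/").json()
    print(listing["total_count"], "key(s)")
    # Revoke the key again ("id" as the key's field name is an assumption).
    client.delete(f"/api_key/{created['id']}")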

View file

@ -1,3 +1,4 @@
from typing import Optional
from langflow.template.frontend_node.base import FrontendNode
from pydantic import BaseModel, validator
@ -20,7 +21,8 @@ class FrontendNodeRequest(FrontendNode):
class ValidatePromptRequest(BaseModel):
name: str
template: str
frontend_node: FrontendNodeRequest
# optional for tweak call
frontend_node: Optional[FrontendNodeRequest] = None
# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}}
@ -39,7 +41,8 @@ class CodeValidationResponse(BaseModel):
class PromptValidationResponse(BaseModel):
input_variables: list
frontend_node: FrontendNodeRequest
# object return for tweak call
frontend_node: Optional[FrontendNodeRequest] = None
INVALID_CHARACTERS = {

View file

@ -1,55 +1,33 @@
import asyncio
from uuid import UUID
from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
from langflow.api.v1.schemas import ChatResponse
from langflow.api.v1.schemas import ChatResponse, PromptResponse
from typing import Any, Dict, List, Union
from fastapi import WebSocket
from typing import Any, Dict, List, Optional
from langflow.services.getters import get_chat_service
from langchain.schema import AgentAction, LLMResult, AgentFinish
from langflow.utils.logger import logger
from langflow.utils.util import remove_ansi_escape_codes
from langchain.schema import AgentAction, AgentFinish
from loguru import logger
# https://github.com/hwchase17/chat-langchain/blob/master/callback.py
class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
"""Callback handler for streaming LLM responses."""
def __init__(self, websocket: WebSocket):
self.websocket = websocket
def __init__(self, client_id: str):
self.chat_service = get_chat_service()
self.client_id = client_id
self.websocket = self.chat_service.active_connections[self.client_id]
async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
resp = ChatResponse(message=token, type="stream", intermediate_steps="")
await self.websocket.send_json(resp.dict())
async def on_llm_start(
self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
) -> Any:
"""Run when LLM starts running."""
async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> Any:
"""Run when LLM ends running."""
async def on_llm_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> Any:
"""Run when LLM errors."""
async def on_chain_start(
self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
) -> Any:
"""Run when chain starts running."""
async def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> Any:
"""Run when chain ends running."""
async def on_chain_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> Any:
"""Run when chain errors."""
async def on_tool_start(
self, serialized: Dict[str, Any], input_str: str, **kwargs: Any
) -> Any:
@ -95,8 +73,14 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
logger.error(f"Error sending response: {exc}")
async def on_tool_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> Any:
self,
error: BaseException,
*,
run_id: UUID,
parent_run_id: Optional[UUID] = None,
tags: Optional[List[str]] = None,
**kwargs: Any,
) -> None:
"""Run when tool errors."""
async def on_text(self, text: str, **kwargs: Any) -> Any:
@ -104,6 +88,14 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
# This runs when the prompt is first sent
# to the LLM; handling it here sends the final prompt
# to the frontend
if "Prompt after formatting" in text:
text = text.replace("Prompt after formatting:\n", "")
text = remove_ansi_escape_codes(text)
resp = PromptResponse(
prompt=text,
)
await self.websocket.send_json(resp.dict())
self.chat_service.chat_history.add_message(self.client_id, resp)
async def on_agent_action(self, action: AgentAction, **kwargs: Any):
log = f"Thought: {action.log}"
@ -131,8 +123,10 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
class StreamingLLMCallbackHandler(BaseCallbackHandler):
"""Callback handler for streaming LLM responses."""
def __init__(self, websocket):
self.websocket = websocket
def __init__(self, client_id: str):
self.chat_service = get_chat_service()
self.client_id = client_id
self.websocket = self.chat_service.active_connections[self.client_id]
def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
resp = ChatResponse(message=token, type="stream", intermediate_steps="")
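Both handlers now take a client id and resolve the websocket from the chat service themselves. A hedged sketch of wiring one into a LangChain call; the import path and the chain wiring are illustrative assumptions:

from langchain.chains import LLMChain  # available with the langchain pin in pyproject.toml
from langflow.api.v1.callbacks import AsyncStreamingLLMCallbackHandler  # module path assumed

async def stream_to_client(chain: LLMChain, inputs: dict, client_id: str) -> dict:
    # client_id must already be registered in chat_service.active_connections,
    # i.e. the chat websocket for that flow has been accepted.
    handler = AsyncStreamingLLMCallbackHandler(client_id=client_id)
    return await chain.acall(inputs, callbacks=[handler])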

View file

@ -1,57 +1,99 @@
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketException, status
from fastapi import (
APIRouter,
Depends,
HTTPException,
Query,
WebSocket,
WebSocketException,
status,
)
from fastapi.responses import StreamingResponse
from langflow.api.utils import build_input_keys_response
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
from langflow.chat.manager import ChatManager
from langflow.graph.graph.base import Graph
from langflow.utils.logger import logger
from cachetools import LRUCache
from langflow.services.auth.utils import get_current_active_user, get_current_user
from langflow.services.cache.utils import update_build_status
from loguru import logger
from langflow.services.getters import get_chat_service, get_session, get_cache_service
from sqlmodel import Session
from langflow.services.chat.manager import ChatService
from langflow.services.cache.manager import BaseCacheService
router = APIRouter(tags=["Chat"])
chat_manager = ChatManager()
flow_data_store: LRUCache = LRUCache(maxsize=10)
@router.websocket("/chat/{client_id}")
async def chat(client_id: str, websocket: WebSocket):
async def chat(
client_id: str,
websocket: WebSocket,
token: str = Query(...),
db: Session = Depends(get_session),
chat_service: "ChatService" = Depends(get_chat_service),
):
"""Websocket endpoint for chat."""
try:
if client_id in chat_manager.in_memory_cache:
await chat_manager.handle_websocket(client_id, websocket)
await websocket.accept()
user = await get_current_user(token, db)
if not user:
await websocket.close(
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
)
if not user.is_active:
await websocket.close(
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
)
if client_id in chat_service.cache_service:
await chat_service.handle_websocket(client_id, websocket)
else:
# We accept the connection but close it immediately
# if the flow is not built yet
await websocket.accept()
message = "Please, build the flow before sending messages"
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message)
except WebSocketException as exc:
logger.error(f"Websocket error: {exc}")
logger.error(f"Websocket exrror: {exc}")
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc))
except Exception as exc:
logger.error(f"Error in chat websocket: {exc}")
message = exc.detail if isinstance(exc, HTTPException) else str(exc)
if "Could not validate credentials" in str(exc):
await websocket.close(
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
)
else:
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message)
@router.post("/build/init/{flow_id}", response_model=InitResponse, status_code=201)
async def init_build(graph_data: dict, flow_id: str):
async def init_build(
graph_data: dict,
flow_id: str,
current_user=Depends(get_current_active_user),
chat_service: "ChatService" = Depends(get_chat_service),
cache_service: "BaseCacheService" = Depends(get_cache_service),
):
"""Initialize the build by storing graph data and returning a unique session ID."""
try:
if flow_id is None:
raise ValueError("No ID provided")
# Check if already building
if (
flow_id in flow_data_store
and flow_data_store[flow_id]["status"] == BuildStatus.IN_PROGRESS
flow_id in cache_service
and isinstance(cache_service[flow_id], dict)
and cache_service[flow_id].get("status") == BuildStatus.IN_PROGRESS
):
return InitResponse(flowId=flow_id)
# Delete from cache if already exists
if flow_id in chat_manager.in_memory_cache:
with chat_manager.in_memory_cache._lock:
chat_manager.in_memory_cache.delete(flow_id)
logger.debug(f"Deleted flow {flow_id} from cache")
flow_data_store[flow_id] = {
if flow_id in chat_service.cache_service:
chat_service.cache_service.delete(flow_id)
logger.debug(f"Deleted flow {flow_id} from cache")
cache_service[flow_id] = {
"graph_data": graph_data,
"status": BuildStatus.STARTED,
"user_id": current_user.id,
}
return InitResponse(flowId=flow_id)
@ -61,12 +103,14 @@ async def init_build(graph_data: dict, flow_id: str):
@router.get("/build/{flow_id}/status", response_model=BuiltResponse)
async def build_status(flow_id: str):
"""Check the flow_id is in the flow_data_store."""
async def build_status(
flow_id: str, cache_service: "BaseCacheService" = Depends(get_cache_service)
):
"""Check the flow_id is in the cache_service."""
try:
built = (
flow_id in flow_data_store
and flow_data_store[flow_id]["status"] == BuildStatus.SUCCESS
flow_id in cache_service
and cache_service[flow_id]["status"] == BuildStatus.SUCCESS
)
return BuiltResponse(
@ -79,24 +123,29 @@ async def build_status(flow_id: str):
@router.get("/build/stream/{flow_id}", response_class=StreamingResponse)
async def stream_build(flow_id: str):
async def stream_build(
flow_id: str,
chat_service: "ChatService" = Depends(get_chat_service),
cache_service: "BaseCacheService" = Depends(get_cache_service),
):
"""Stream the build process based on stored flow data."""
async def event_stream(flow_id):
final_response = {"end_of_stream": True}
artifacts = {}
try:
if flow_id not in flow_data_store:
if flow_id not in cache_service:
error_message = "Invalid session ID"
yield str(StreamData(event="error", data={"error": error_message}))
return
if flow_data_store[flow_id].get("status") == BuildStatus.IN_PROGRESS:
if cache_service[flow_id].get("status") == BuildStatus.IN_PROGRESS:
error_message = "Already building"
yield str(StreamData(event="error", data={"error": error_message}))
return
graph_data = flow_data_store[flow_id].get("graph_data")
graph_data = cache_service[flow_id].get("graph_data")
cache_service[flow_id]["user_id"]
if not graph_data:
error_message = "No data provided"
@ -109,7 +158,7 @@ async def stream_build(flow_id: str):
graph = Graph.from_payload(graph_data)
number_of_nodes = len(graph.nodes)
flow_data_store[flow_id]["status"] = BuildStatus.IN_PROGRESS
update_build_status(cache_service, flow_id, BuildStatus.IN_PROGRESS)
for i, vertex in enumerate(graph.generator_build(), 1):
try:
@ -117,7 +166,10 @@ async def stream_build(flow_id: str):
"log": f"Building node {vertex.vertex_type}",
}
yield str(StreamData(event="log", data=log_dict))
vertex.build()
if vertex.is_task:
vertex = try_running_celery_task(vertex)
else:
vertex.build()
params = vertex._built_object_repr()
valid = True
logger.debug(f"Building node {str(vertex.vertex_type)}")
@ -133,7 +185,7 @@ async def stream_build(flow_id: str):
logger.exception(exc)
params = str(exc)
valid = False
flow_data_store[flow_id]["status"] = BuildStatus.FAILURE
update_build_status(cache_service, flow_id, BuildStatus.FAILURE)
response = {
"valid": valid,
@ -157,15 +209,15 @@ async def stream_build(flow_id: str):
"handle_keys": [],
}
yield str(StreamData(event="message", data=input_keys_response))
chat_manager.set_cache(flow_id, langchain_object)
chat_service.set_cache(flow_id, langchain_object)
# We need to reset the chat history
chat_manager.chat_history.empty_history(flow_id)
flow_data_store[flow_id]["status"] = BuildStatus.SUCCESS
chat_service.chat_history.empty_history(flow_id)
update_build_status(cache_service, flow_id, BuildStatus.SUCCESS)
except Exception as exc:
logger.exception(exc)
logger.error("Error while building the flow: %s", exc)
flow_data_store[flow_id]["status"] = BuildStatus.FAILURE
update_build_status(cache_service, flow_id, BuildStatus.FAILURE)
yield str(StreamData(event="error", data={"error": str(exc)}))
finally:
yield str(StreamData(event="message", data=final_response))
@ -175,3 +227,19 @@ async def stream_build(flow_id: str):
except Exception as exc:
logger.error(f"Error streaming build: {exc}")
raise HTTPException(status_code=500, detail=str(exc))
def try_running_celery_task(vertex):
# Try running the task in celery
# and set the task_id to the local vertex
# if it fails, run the task locally
try:
from langflow.worker import build_vertex
task = build_vertex.delay(vertex)
vertex.task_id = task.id
except Exception as exc:
logger.debug(f"Error running task in celery: {exc}")
vertex.task_id = None
vertex.build()
return vertex
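For reference, a minimal client sketch for the now-authenticated chat websocket; the api/v1 prefix, the outgoing payload shape, and the "end" message type are assumptions about how the frontend drives this endpoint:

import asyncio
import json

import websockets  # third-party "websockets" package

async def chat_once(flow_id: str, token: str, message: str) -> None:
    uri = f"ws://127.0.0.1:7860/api/v1/chat/{flow_id}?token={token}"
    async with websockets.connect(uri) as ws:
        await ws.send(json.dumps({"message": message}))  # payload shape is an assumption
        async for raw in ws:
            resp = json.loads(raw)  # server streams ChatResponse-shaped JSON
            print(resp.get("type"), resp.get("message"))
            if resp.get("type") == "end":
                break

asyncio.run(chat_once("<flow-id>", "<access-token>", "Hello"))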

View file

@ -1,8 +1,8 @@
from datetime import timezone
from typing import List
from uuid import UUID
from langflow.database.models.component import Component, ComponentModel
from langflow.database.base import get_session
from langflow.services.database.models.component import Component, ComponentModel
from langflow.services.getters import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.exc import IntegrityError

View file

@ -1,89 +1,103 @@
from http import HTTPStatus
from typing import Annotated, Optional
from typing import Annotated, Optional, Union
from langflow.services.auth.utils import api_key_security, get_current_active_user
from langflow.cache.utils import save_uploaded_file
from langflow.database.models.flow import Flow
from langflow.services.cache.utils import save_uploaded_file
from langflow.services.database.models.flow import Flow
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.utils.logger import logger
from langflow.settings import settings
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body
from langflow.services.database.models.user.user import User
from langflow.services.getters import (
get_session_service,
get_settings_service,
get_task_service,
)
from loguru import logger
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body, status
import sqlalchemy as sa
from langflow.interface.custom.custom_component import CustomComponent
from langflow.api.v1.schemas import (
ProcessResponse,
TaskResponse,
TaskStatusResponse,
UploadFileResponse,
CustomComponentCode,
)
from langflow.api.utils import merge_nested_dicts_with_renaming
from langflow.interface.types import (
build_langchain_types_dict,
build_langchain_template_custom_component,
build_langchain_custom_component_list_from_path,
)
from langflow.services.getters import get_session
try:
from langflow.worker import process_graph_cached_task
except ImportError:
def process_graph_cached_task(*args, **kwargs):
raise NotImplementedError("Celery is not installed")
from langflow.database.base import get_session
from sqlmodel import Session
from langflow.services.task.manager import TaskService
# build router
router = APIRouter(tags=["Base"])
@router.get("/all")
def get_all():
@router.get("/all", dependencies=[Depends(get_current_active_user)])
def get_all(
settings_service=Depends(get_settings_service),
):
from langflow.interface.types import get_all_types_dict
logger.debug("Building langchain types dict")
native_components = build_langchain_types_dict()
# custom_components is a list of dicts
# need to merge all the keys into one dict
custom_components_from_file = {}
if settings.COMPONENTS_PATH:
logger.info(f"Building custom components from {settings.COMPONENTS_PATH}")
custom_component_dicts = []
processed_paths = []
for path in settings.COMPONENTS_PATH:
if str(path) in processed_paths:
continue
custom_component_dict = build_langchain_custom_component_list_from_path(
str(path)
)
custom_component_dicts.append(custom_component_dict)
processed_paths.append(str(path))
logger.info(f"Loading {len(custom_component_dicts)} category(ies)")
for custom_component_dict in custom_component_dicts:
logger.debug(
{key: len(value) for key, value in custom_component_dict.items()}
)
custom_components_from_file = merge_nested_dicts_with_renaming(
custom_components_from_file, custom_component_dict
)
return merge_nested_dicts_with_renaming(
native_components, custom_components_from_file
)
try:
return get_all_types_dict(settings_service)
except Exception as exc:
raise HTTPException(status_code=500, detail=str(exc)) from exc
# For backwards compatibility we will keep the old endpoint
@router.post("/predict/{flow_id}", response_model=ProcessResponse)
@router.post("/process/{flow_id}", response_model=ProcessResponse)
@router.post(
"/predict/{flow_id}",
response_model=ProcessResponse,
dependencies=[Depends(api_key_security)],
)
@router.post(
"/process/{flow_id}",
response_model=ProcessResponse,
)
async def process_flow(
session: Annotated[Session, Depends(get_session)],
flow_id: str,
inputs: Optional[dict] = None,
tweaks: Optional[dict] = None,
clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821
session: Session = Depends(get_session),
session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821
task_service: "TaskService" = Depends(get_task_service),
api_key_user: User = Depends(api_key_security),
sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821
):
"""
Endpoint to process an input with a given flow_id.
"""
try:
flow = session.get(Flow, flow_id)
if api_key_user is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid API Key",
)
# Get the flow that matches the flow_id and belongs to the user
flow = (
session.query(Flow)
.filter(Flow.id == flow_id)
.filter(Flow.user_id == api_key_user.id)
.first()
)
if flow is None:
raise ValueError(f"Flow {flow_id} not found")
@ -95,16 +109,94 @@ async def process_flow(
graph_data = process_tweaks(graph_data, tweaks)
except Exception as exc:
logger.error(f"Error processing tweaks: {exc}")
response = process_graph_cached(graph_data, inputs, clear_cache)
if sync:
task_id, result = await task_service.launch_and_await_task(
process_graph_cached_task
if task_service.use_celery
else process_graph_cached,
graph_data,
inputs,
clear_cache,
session_id,
)
if isinstance(result, dict) and "result" in result:
task_result = result["result"]
session_id = result["session_id"]
elif hasattr(result, "result") and hasattr(result, "session_id"):
task_result = result.result
session_id = result.session_id
else:
logger.warning(
"This is an experimental feature and may not work as expected."
"Please report any issues to our GitHub repository."
)
if session_id is None:
# Generate a session ID
session_id = get_session_service().generate_key(
session_id=session_id, data_graph=graph_data
)
task_id, task = await task_service.launch_task(
process_graph_cached_task
if task_service.use_celery
else process_graph_cached,
graph_data,
inputs,
clear_cache,
session_id,
)
task_result = task.status
if task_id:
task_response = TaskResponse(id=task_id, href=f"api/v1/task/{task_id}")
else:
task_response = None
return ProcessResponse(
result=response,
result=task_result,
task=task_response,
session_id=session_id,
backend=task_service.backend_name,
)
except sa.exc.StatementError as exc:
# StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
if "badly formed hexadecimal UUID string" in str(exc):
# This means the flow ID is not a valid UUID, so the flow cannot be found
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
) from exc
except ValueError as exc:
if f"Flow {flow_id} not found" in str(exc):
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
) from exc
else:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)
) from exc
except Exception as e:
# Log stack trace
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
@router.get("/task/{task_id}", response_model=TaskStatusResponse)
async def get_task_status(task_id: str):
task_service = get_task_service()
task = task_service.get_task(task_id)
if task is None:
raise HTTPException(status_code=404, detail="Task not found")
result = None
if task.ready():
result = task.result
if isinstance(result, dict) and "result" in result:
result = result["result"]
elif hasattr(result, "result"):
result = result.result
return TaskStatusResponse(status=task.status, result=result)
@router.post(
"/upload/{flow_id}",
response_model=UploadFileResponse,
@ -113,7 +205,7 @@ async def process_flow(
async def create_upload_file(file: UploadFile, flow_id: str):
# Cache file
try:
file_path = save_uploaded_file(file.file, folder_name=flow_id)
file_path = save_uploaded_file(file, folder_name=flow_id)
return UploadFileResponse(
flowId=flow_id,
@ -136,6 +228,10 @@ def get_version():
async def custom_component(
raw_code: CustomComponentCode,
):
from langflow.interface.types import (
build_langchain_template_custom_component,
)
extractor = CustomComponent(code=raw_code.code)
extractor.is_check_valid()
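A hedged sketch of calling the reworked process endpoint and the new task status route, assuming the server runs locally, the API key travels in an x-api-key header (the exact header read by api_key_security is an assumption), and the Celery-style SUCCESS/FAILURE states apply when the task backend is Celery:

import time

import httpx

BASE = "http://127.0.0.1:7860/api/v1"
headers = {"x-api-key": "<api-key>"}  # assumed header name

with httpx.Client(base_url=BASE, headers=headers, timeout=None) as client:
    # sync=False returns a TaskResponse instead of waiting for the final result.
    resp = client.post(
        "/process/<flow-id>",
        json={"inputs": {"input": "Hi"}, "sync": False},
    ).json()
    task = resp.get("task")
    if task:
        # Poll the new /task endpoint until the backend reports a terminal state.
        while True:
            status = client.get(f"/task/{task['id']}").json()
            if status["status"] in ("SUCCESS", "FAILURE"):
                print(status["result"])
                break
            time.sleep(1)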

View file

@ -1,83 +0,0 @@
from uuid import UUID
from langflow.database.models.flow_style import (
FlowStyle,
FlowStyleCreate,
FlowStyleRead,
FlowStyleUpdate,
)
from langflow.database.base import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
# build router
router = APIRouter(prefix="/flow_styles", tags=["FlowStyles"])
# FlowStyleCreate:
# class FlowStyleBase(SQLModel):
# color: str = Field(index=True)
# emoji: str = Field(index=False)
# flow_id: UUID = Field(default=None, foreign_key="flow.id")
@router.post("/", response_model=FlowStyleRead)
def create_flow_style(
*, session: Session = Depends(get_session), flow_style: FlowStyleCreate
):
"""Create a new flow_style."""
db_flow_style = FlowStyle.from_orm(flow_style)
session.add(db_flow_style)
session.commit()
session.refresh(db_flow_style)
return db_flow_style
@router.get("/", response_model=list[FlowStyleRead])
def read_flow_styles(*, session: Session = Depends(get_session)):
"""Read all flows."""
try:
flows = session.exec(select(FlowStyle)).all()
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e
return flows
@router.get("/{flow_styles_id}", response_model=FlowStyleRead)
def read_flow_style(*, session: Session = Depends(get_session), flow_styles_id: UUID):
"""Read a flow_style."""
if flow_style := session.get(FlowStyle, flow_styles_id):
return flow_style
else:
raise HTTPException(status_code=404, detail="FlowStyle not found")
@router.patch("/{flow_style_id}", response_model=FlowStyleRead)
def update_flow_style(
*,
session: Session = Depends(get_session),
flow_style_id: UUID,
flow_style: FlowStyleUpdate,
):
"""Update a flow_style."""
db_flow_style = session.get(FlowStyle, flow_style_id)
if not db_flow_style:
raise HTTPException(status_code=404, detail="FlowStyle not found")
flow_data = flow_style.dict(exclude_unset=True)
for key, value in flow_data.items():
if hasattr(db_flow_style, key) and value is not None:
setattr(db_flow_style, key, value)
session.add(db_flow_style)
session.commit()
session.refresh(db_flow_style)
return db_flow_style
@router.delete("/{flow_id}")
def delete_flow_style(*, session: Session = Depends(get_session), flow_id: UUID):
"""Delete a flow_style."""
flow_style = session.get(FlowStyle, flow_id)
if not flow_style:
raise HTTPException(status_code=404, detail="FlowStyle not found")
session.delete(flow_style)
session.commit()
return {"message": "FlowStyle deleted successfully"}

View file

@@ -1,19 +1,21 @@
from typing import List
from uuid import UUID
from fastapi.encoders import jsonable_encoder
from langflow.settings import settings
from langflow.api.utils import remove_api_keys
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.flow import (
Flow,
FlowCreate,
FlowRead,
FlowReadWithStyle,
FlowUpdate,
)
from langflow.services.database.models.user.user import User
from langflow.services.getters import get_session
from langflow.services.getters import get_settings_service
import orjson
from sqlmodel import Session
from fastapi import APIRouter, Depends, HTTPException
from fastapi import File, UploadFile
@@ -23,48 +25,77 @@ router = APIRouter(prefix="/flows", tags=["Flows"])
@router.post("/", response_model=FlowRead, status_code=201)
def create_flow(
*,
session: Session = Depends(get_session),
flow: FlowCreate,
current_user: User = Depends(get_current_active_user),
):
"""Create a new flow."""
if flow.user_id is None:
flow.user_id = current_user.id
db_flow = Flow.from_orm(flow)
session.add(db_flow)
session.commit()
session.refresh(db_flow)
return db_flow
@router.get("/", response_model=list[FlowReadWithStyle], status_code=200)
def read_flows(*, session: Session = Depends(get_session)):
@router.get("/", response_model=list[FlowRead], status_code=200)
def read_flows(
*,
session: Session = Depends(get_session),
current_user: User = Depends(get_current_active_user),
):
"""Read all flows."""
try:
flows = current_user.flows
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e
return [jsonable_encoder(flow) for flow in flows]
@router.get("/{flow_id}", response_model=FlowReadWithStyle, status_code=200)
def read_flow(*, session: Session = Depends(get_session), flow_id: UUID):
@router.get("/{flow_id}", response_model=FlowRead, status_code=200)
def read_flow(
*,
session: Session = Depends(get_session),
flow_id: UUID,
current_user: User = Depends(get_current_active_user),
):
"""Read a flow."""
if user_flow := (
session.query(Flow)
.filter(Flow.id == flow_id)
.filter(Flow.user_id == current_user.id)
.first()
):
return user_flow
else:
raise HTTPException(status_code=404, detail="Flow not found")
@router.patch("/{flow_id}", response_model=FlowRead, status_code=200)
def update_flow(
*,
session: Session = Depends(get_session),
flow_id: UUID,
flow: FlowUpdate,
current_user: User = Depends(get_current_active_user),
settings_service=Depends(get_settings_service),
):
"""Update a flow."""
db_flow = read_flow(session=session, flow_id=flow_id, current_user=current_user)
if not db_flow:
raise HTTPException(status_code=404, detail="Flow not found")
flow_data = flow.dict(exclude_unset=True)
if settings_service.settings.REMOVE_API_KEYS:
flow_data = remove_api_keys(flow_data)
for key, value in flow_data.items():
if value is not None:
setattr(db_flow, key, value)
session.add(db_flow)
session.commit()
session.refresh(db_flow)
@@ -72,9 +103,14 @@ def update_flow(
@router.delete("/{flow_id}", status_code=200)
def delete_flow(
*,
session: Session = Depends(get_session),
flow_id: UUID,
current_user: User = Depends(get_current_active_user),
):
"""Delete a flow."""
flow = read_flow(session=session, flow_id=flow_id, current_user=current_user)
if not flow:
raise HTTPException(status_code=404, detail="Flow not found")
session.delete(flow)
@@ -86,10 +122,16 @@ def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID):
@router.post("/batch/", response_model=List[FlowRead], status_code=201)
def create_flows(
*,
session: Session = Depends(get_session),
flow_list: FlowListCreate,
current_user: User = Depends(get_current_active_user),
):
"""Create multiple new flows."""
db_flows = []
for flow in flow_list.flows:
flow.user_id = current_user.id
db_flow = Flow.from_orm(flow)
session.add(db_flow)
db_flows.append(db_flow)
@@ -101,7 +143,10 @@ def create_flows(*, session: Session = Depends(get_session), flow_list: FlowList
@router.post("/upload/", response_model=List[FlowRead], status_code=201)
async def upload_file(
*,
session: Session = Depends(get_session),
file: UploadFile = File(...),
current_user: User = Depends(get_current_active_user),
):
"""Upload flows from a file."""
contents = await file.read()
@@ -110,11 +155,19 @@ async def upload_file(
flow_list = FlowListCreate(**data)
else:
flow_list = FlowListCreate(flows=[FlowCreate(**flow) for flow in data])
# Now we set the user_id for all flows
for flow in flow_list.flows:
flow.user_id = current_user.id
return create_flows(session=session, flow_list=flow_list, current_user=current_user)
@router.get("/download/", response_model=FlowListRead, status_code=200)
async def download_file(
*,
session: Session = Depends(get_session),
current_user: User = Depends(get_current_active_user),
):
"""Download all flows as a file."""
flows = read_flows(session=session, current_user=current_user)
return FlowListRead(flows=flows)
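A hedged end-to-end sketch of the now user-scoped flows API; the base URL, route prefix, credentials and the minimal FlowCreate payload are assumptions for illustration:
import requests

BASE_URL = "http://localhost:7860/api/v1"  # assumed
tokens = requests.post(
    f"{BASE_URL}/login", data={"username": "admin", "password": "123456"}
).json()
headers = {"Authorization": f"Bearer {tokens['access_token']}"}

# Create a flow; user_id is filled in server-side from the authenticated user.
flow = requests.post(
    f"{BASE_URL}/flows/", json={"name": "My Flow", "data": {}}, headers=headers
).json()

# The list endpoint now returns only the current user's flows.
my_flows = requests.get(f"{BASE_URL}/flows/", headers=headers).json()
print(flow["id"], len(my_flows))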

View file

@@ -0,0 +1,73 @@
from sqlmodel import Session
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from langflow.services.getters import get_session
from langflow.api.v1.schemas import Token
from langflow.services.auth.utils import (
authenticate_user,
create_user_tokens,
create_refresh_token,
create_user_longterm_token,
get_current_active_user,
)
from langflow.services.getters import get_settings_service
router = APIRouter(tags=["Login"])
@router.post("/login", response_model=Token)
async def login_to_get_access_token(
form_data: OAuth2PasswordRequestForm = Depends(),
db: Session = Depends(get_session),
# _: Session = Depends(get_current_active_user)
):
try:
user = authenticate_user(form_data.username, form_data.password, db)
except Exception as exc:
if isinstance(exc, HTTPException):
raise exc
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=str(exc),
) from exc
if user:
return create_user_tokens(user_id=user.id, db=db, update_last_login=True)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
@router.get("/auto_login")
async def auto_login(
db: Session = Depends(get_session), settings_service=Depends(get_settings_service)
):
if settings_service.auth_settings.AUTO_LOGIN:
return create_user_longterm_token(db)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail={
"message": "Auto login is disabled. Please enable it in the settings",
"auto_login": False,
},
)
@router.post("/refresh")
async def refresh_token(
token: str, current_user: Session = Depends(get_current_active_user)
):
if token:
return create_refresh_token(token)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid refresh token",
headers={"WWW-Authenticate": "Bearer"},
)
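From the client side, /login takes an OAuth2 password form and returns the Token schema; a hedged sketch in which the URL and credentials are placeholders:
import requests

BASE_URL = "http://localhost:7860/api/v1"  # assumed
tokens = requests.post(
    f"{BASE_URL}/login",
    data={"username": "admin", "password": "123456"},  # form-encoded, not JSON
).json()  # Token: access_token, refresh_token, token_type

# /refresh expects the refresh token and an authenticated caller.
refreshed = requests.post(
    f"{BASE_URL}/refresh",
    params={"token": tokens["refresh_token"]},
    headers={"Authorization": f"Bearer {tokens['access_token']}"},
)
print(tokens["token_type"], refreshed.status_code)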

View file

@@ -1,8 +1,12 @@
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from uuid import UUID
from langflow.services.database.models.api_key.api_key import ApiKeyRead
from langflow.services.database.models.flow import FlowCreate, FlowRead
from langflow.services.database.models.user import UserRead
from langflow.services.database.models.base import orjson_dumps
from pydantic import BaseModel, Field, validator
@@ -43,10 +47,30 @@ class UpdateTemplateRequest(BaseModel):
template: dict
class TaskResponse(BaseModel):
"""Task response schema."""
id: Optional[str] = Field(None)
href: Optional[str] = Field(None)
class ProcessResponse(BaseModel):
"""Process response schema."""
result: Any
task: Optional[TaskResponse] = None
session_id: Optional[str] = None
backend: Optional[str] = None
# TaskStatusResponse(
# status=task.status, result=task.result if task.ready() else None
# )
class TaskStatusResponse(BaseModel):
"""Task status response schema."""
status: str
result: Optional[Any] = None
class ChatMessage(BaseModel):
@@ -54,6 +78,7 @@ class ChatMessage(BaseModel):
is_bot: bool = False
message: Union[str, None, dict] = None
chatKey: Optional[str] = None
type: str = "human"
@@ -61,6 +86,7 @@ class ChatResponse(ChatMessage):
"""Chat response schema."""
intermediate_steps: str
type: str
is_bot: bool = True
files: list = []
@@ -72,6 +98,14 @@ class ChatResponse(ChatMessage):
return v
class PromptResponse(ChatMessage):
"""Prompt response schema."""
prompt: str
type: str = "prompt"
is_bot: bool = True
class FileResponse(ChatMessage):
"""File response schema."""
@@ -135,3 +169,32 @@ class ComponentListCreate(BaseModel):
class ComponentListRead(BaseModel):
flows: List[FlowRead]
class UsersResponse(BaseModel):
total_count: int
users: List[UserRead]
class ApiKeyResponse(BaseModel):
id: str
api_key: str
name: str
created_at: str
last_used_at: str
class ApiKeysResponse(BaseModel):
total_count: int
user_id: UUID
api_keys: List[ApiKeyRead]
class CreateApiKeyRequest(BaseModel):
name: str
class Token(BaseModel):
access_token: str
refresh_token: str
token_type: str
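A short sanity check of the new response schemas (pydantic v1 style, matching the `validator` import above); field values are made up and the snippet assumes it runs where these classes are importable:
process = ProcessResponse(
    result={"answer": "42"},  # `result: Any` now accepts arbitrary payloads
    task=TaskResponse(id="abc123", href="/api/v1/task/abc123"),
    session_id="session-1",
    backend="celery",
)
status = TaskStatusResponse(status="PENDING")
print(process.json())
print(status.dict())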

View file

@@ -0,0 +1,169 @@
from uuid import UUID
from langflow.api.v1.schemas import UsersResponse
from langflow.services.database.models.user import (
User,
UserCreate,
UserRead,
UserUpdate,
)
from sqlalchemy import func
from sqlalchemy.exc import IntegrityError
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
from langflow.services.getters import get_session, get_settings_service
from langflow.services.auth.utils import (
get_current_active_superuser,
get_current_active_user,
get_password_hash,
verify_password,
)
from langflow.services.database.models.user.crud import (
get_user_by_id,
update_user,
)
router = APIRouter(tags=["Users"], prefix="/users")
@router.post("/", response_model=UserRead, status_code=201)
def add_user(
user: UserCreate,
session: Session = Depends(get_session),
settings_service=Depends(get_settings_service),
) -> User:
"""
Add a new user to the database.
"""
new_user = User.from_orm(user)
try:
new_user.password = get_password_hash(user.password)
new_user.is_active = settings_service.auth_settings.NEW_USER_IS_ACTIVE
session.add(new_user)
session.commit()
session.refresh(new_user)
except IntegrityError as e:
session.rollback()
raise HTTPException(
status_code=400, detail="This username is unavailable."
) from e
return new_user
@router.get("/whoami", response_model=UserRead)
def read_current_user(
current_user: User = Depends(get_current_active_user),
) -> User:
"""
Retrieve the current user's data.
"""
return current_user
@router.get("/", response_model=UsersResponse)
def read_all_users(
skip: int = 0,
limit: int = 10,
_: Session = Depends(get_current_active_superuser),
session: Session = Depends(get_session),
) -> UsersResponse:
"""
Retrieve a list of users from the database with pagination.
"""
query = select(User).offset(skip).limit(limit)
users = session.execute(query).fetchall()
count_query = select(func.count()).select_from(User) # type: ignore
total_count = session.execute(count_query).scalar()
return UsersResponse(
total_count=total_count, # type: ignore
users=[UserRead(**dict(user.User)) for user in users],
)
@router.patch("/{user_id}", response_model=UserRead)
def patch_user(
user_id: UUID,
user_update: UserUpdate,
user: User = Depends(get_current_active_user),
session: Session = Depends(get_session),
) -> User:
"""
Update an existing user's data.
"""
if not user.is_superuser and user.id != user_id:
raise HTTPException(
status_code=403, detail="You don't have the permission to update this user"
)
if user_update.password:
if not user.is_superuser:
raise HTTPException(
status_code=400, detail="You can't change your password here"
)
user_update.password = get_password_hash(user_update.password)
if user_db := get_user_by_id(session, user_id):
return update_user(user_db, user_update, session)
else:
raise HTTPException(status_code=404, detail="User not found")
@router.patch("/{user_id}/reset-password", response_model=UserRead)
def reset_password(
user_id: UUID,
user_update: UserUpdate,
user: User = Depends(get_current_active_user),
session: Session = Depends(get_session),
) -> User:
"""
Reset a user's password.
"""
if user_id != user.id:
raise HTTPException(
status_code=400, detail="You can't change another user's password"
)
if not user:
raise HTTPException(status_code=404, detail="User not found")
if verify_password(user_update.password, user.password):
raise HTTPException(
status_code=400, detail="You can't use your current password"
)
new_password = get_password_hash(user_update.password)
user.password = new_password
session.commit()
session.refresh(user)
return user
@router.delete("/{user_id}", response_model=dict)
def delete_user(
user_id: UUID,
current_user: User = Depends(get_current_active_superuser),
session: Session = Depends(get_session),
) -> dict:
"""
Delete a user from the database.
"""
if current_user.id == user_id:
raise HTTPException(
status_code=400, detail="You can't delete your own user account"
)
elif not current_user.is_superuser:
raise HTTPException(
status_code=403, detail="You don't have the permission to delete this user"
)
user_db = session.query(User).filter(User.id == user_id).first()
if not user_db:
raise HTTPException(status_code=404, detail="User not found")
session.delete(user_db)
session.commit()
return {"detail": "User deleted"}

View file

@@ -8,7 +8,7 @@ from langflow.api.v1.base import (
validate_prompt,
)
from langflow.template.field.base import TemplateField
from loguru import logger
from langflow.utils.validate import validate_code
# build router
@@ -31,7 +31,12 @@ def post_validate_code(code: Code):
def post_validate_prompt(prompt_request: ValidatePromptRequest):
try:
input_variables = validate_prompt(prompt_request.template)
# Check if frontend_node is None before proceeding to avoid attempting to update a non-existent node.
if prompt_request.frontend_node is None:
return PromptValidationResponse(
input_variables=input_variables,
frontend_node=None,
)
old_custom_fields = get_old_custom_fields(prompt_request)
add_new_variables_to_template(input_variables, prompt_request)

View file

@@ -1,7 +0,0 @@
from langflow.cache.manager import cache_manager
from langflow.cache.flow import InMemoryCache
__all__ = [
"cache_manager",
"InMemoryCache",
]

View file

@@ -1,146 +0,0 @@
import threading
import time
from collections import OrderedDict
from langflow.cache.base import BaseCache
class InMemoryCache(BaseCache):
"""
A simple in-memory cache using an OrderedDict.
This cache supports setting a maximum size and expiration time for cached items.
When the cache is full, it uses a Least Recently Used (LRU) eviction policy.
Thread-safe using a threading Lock.
Attributes:
max_size (int, optional): Maximum number of items to store in the cache.
expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour.
Example:
cache = InMemoryCache(max_size=3, expiration_time=5)
# setting cache values
cache.set("a", 1)
cache.set("b", 2)
cache["c"] = 3
# getting cache values
a = cache.get("a")
b = cache["b"]
"""
def __init__(self, max_size=None, expiration_time=60 * 60):
"""
Initialize a new InMemoryCache instance.
Args:
max_size (int, optional): Maximum number of items to store in the cache.
expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour.
"""
self._cache = OrderedDict()
self._lock = threading.Lock()
self.max_size = max_size
self.expiration_time = expiration_time
def get(self, key):
"""
Retrieve an item from the cache.
Args:
key: The key of the item to retrieve.
Returns:
The value associated with the key, or None if the key is not found or the item has expired.
"""
with self._lock:
if key in self._cache:
item = self._cache.pop(key)
if (
self.expiration_time is None
or time.time() - item["time"] < self.expiration_time
):
# Move the key to the end to make it recently used
self._cache[key] = item
return item["value"]
else:
self.delete(key)
return None
def set(self, key, value):
"""
Add an item to the cache.
If the cache is full, the least recently used item is evicted.
Args:
key: The key of the item.
value: The value to cache.
"""
with self._lock:
if key in self._cache:
# Remove existing key before re-inserting to update order
self.delete(key)
elif self.max_size and len(self._cache) >= self.max_size:
# Remove least recently used item
self._cache.popitem(last=False)
self._cache[key] = {"value": value, "time": time.time()}
def get_or_set(self, key, value):
"""
Retrieve an item from the cache. If the item does not exist, set it with the provided value.
Args:
key: The key of the item.
value: The value to cache if the item doesn't exist.
Returns:
The cached value associated with the key.
"""
with self._lock:
if key in self._cache:
return self.get(key)
self.set(key, value)
return value
def delete(self, key):
"""
Remove an item from the cache.
Args:
key: The key of the item to remove.
"""
# with self._lock:
self._cache.pop(key, None)
def clear(self):
"""
Clear all items from the cache.
"""
with self._lock:
self._cache.clear()
def __contains__(self, key):
"""Check if the key is in the cache."""
return key in self._cache
def __getitem__(self, key):
"""Retrieve an item from the cache using the square bracket notation."""
return self.get(key)
def __setitem__(self, key, value):
"""Add an item to the cache using the square bracket notation."""
self.set(key, value)
def __delitem__(self, key):
"""Remove an item from the cache using the square bracket notation."""
self.delete(key)
def __len__(self):
"""Return the number of items in the cache."""
return len(self._cache)
def __repr__(self):
"""Return a string representation of the InMemoryCache instance."""
return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})"

View file

@@ -1,7 +1,7 @@
from langflow import CustomComponent
from langchain.llms.base import BaseLLM
from langchain.prompts import PromptTemplate
from langchain.schema import Document
@@ -16,17 +16,14 @@ class PromptRunner(CustomComponent):
"info": "Make sure the prompt has all variables filled.",
},
"code": {"show": False},
"inputs": {"field_type": "code"},
}
def build(
self, llm: BaseLLM, prompt: PromptTemplate, inputs: dict = {}
) -> Document:
chain = prompt | llm
result = chain.invoke(input=inputs)
if hasattr(result, "content"):
result = result.content
self.repr_value = result
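A standalone sketch of what the updated build() wires together: an LCEL pipe of prompt into LLM invoked with the new `inputs` argument. FakeListLLM is used only so the example runs without an API key; it is not part of this diff:
from langchain.llms.fake import FakeListLLM
from langchain.prompts import PromptTemplate

prompt = PromptTemplate.from_template("Say hello to {name}.")
llm = FakeListLLM(responses=["Hello, Ada!"])

chain = prompt | llm
result = chain.invoke(input={"name": "Ada"})
if hasattr(result, "content"):  # chat models return a message object
    result = result.content
print(result)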

View file

@@ -14,7 +14,7 @@ class MetalRetrieverComponent(CustomComponent):
"api_key": {"display_name": "API Key", "password": True},
"client_id": {"display_name": "Client ID", "password": True},
"index_id": {"display_name": "Index ID"},
"params": {"display_name": "Parameters", "field_type": "code"},
"params": {"display_name": "Parameters"},
"code": {"show": False},
}

View file

@@ -2,7 +2,6 @@ from typing import Optional
from langflow import CustomComponent
from langchain.text_splitter import Language
from langchain.schema import Document
from langflow.utils.util import build_loader_repr_from_documents
class LanguageRecursiveTextSplitterComponent(CustomComponent):
@@ -78,5 +77,4 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent):
)
docs = splitter.split_documents(documents)
self.repr_value = build_loader_repr_from_documents(docs)
return docs

View file

@@ -1,6 +1,7 @@
from typing import Optional
from langflow import CustomComponent
from langchain.schema import Document
from langflow.utils.util import build_loader_repr_from_documents
class RecursiveCharacterTextSplitterComponent(CustomComponent):
@@ -74,6 +75,5 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent):
)
docs = splitter.split_documents(documents)
self.repr_value = build_loader_repr_from_documents(docs)
return docs

View file

@@ -1,6 +1,6 @@
from langflow import CustomComponent
from langchain.schema import Document
from langflow.services.database.models.base import orjson_dumps
import requests
from typing import Optional
@@ -19,7 +19,6 @@ class GetRequest(CustomComponent):
},
"headers": {
"display_name": "Headers",
"field_type": "code",
"info": "The headers to send with the request.",
},
"code": {"show": False},

View file

@@ -13,7 +13,7 @@
from langflow import CustomComponent
from langchain.schema import Document
from langflow.services.database.models.base import orjson_dumps
class JSONDocumentBuilder(CustomComponent):

View file

@@ -1,6 +1,6 @@
from langflow import CustomComponent
from langchain.schema import Document
from langflow.services.database.models.base import orjson_dumps
import requests
from typing import Optional
@@ -15,7 +15,6 @@ class PostRequest(CustomComponent):
"url": {"display_name": "URL", "info": "The URL to make the request to."},
"headers": {
"display_name": "Headers",
"field_type": "code",
"info": "The headers to send with the request.",
},
"code": {"show": False},

View file

@@ -2,7 +2,7 @@ from typing import List, Optional
import requests
from langflow import CustomComponent
from langchain.schema import Document
from langflow.services.database.models.base import orjson_dumps
class UpdateRequest(CustomComponent):
@@ -15,7 +15,7 @@ class UpdateRequest(CustomComponent):
"url": {"display_name": "URL", "info": "The URL to make the request to."},
"headers": {
"display_name": "Headers",
"field_type": "code",
"field_type": "NestedDict",
"info": "The headers to send with the request.",
},
"code": {"show": False},

View file

@@ -0,0 +1,109 @@
from typing import Optional, Union
from langflow import CustomComponent
from langchain.vectorstores import Chroma
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.schema import BaseRetriever
from langchain.embeddings.base import Embeddings
import chromadb # type: ignore
class ChromaComponent(CustomComponent):
"""
A custom component for implementing a Vector Store using Chroma.
"""
display_name: str = "Chroma (Custom Component)"
description: str = "Implementation of Vector Store using Chroma"
documentation = "https://python.langchain.com/docs/integrations/vectorstores/chroma"
beta = True
def build_config(self):
"""
Builds the configuration for the component.
Returns:
- dict: A dictionary containing the configuration options for the component.
"""
return {
"collection_name": {"display_name": "Collection Name", "value": "langflow"},
"persist": {"display_name": "Persist"},
"persist_directory": {"display_name": "Persist Directory"},
"code": {"show": False, "display_name": "Code"},
"documents": {"display_name": "Documents", "is_list": True},
"embedding": {"display_name": "Embedding"},
"chroma_server_cors_allow_origins": {
"display_name": "Server CORS Allow Origins",
"advanced": True,
},
"chroma_server_host": {"display_name": "Server Host", "advanced": True},
"chroma_server_port": {"display_name": "Server Port", "advanced": True},
"chroma_server_grpc_port": {
"display_name": "Server gRPC Port",
"advanced": True,
},
"chroma_server_ssl_enabled": {
"display_name": "Server SSL Enabled",
"advanced": True,
},
}
def build(
self,
collection_name: str,
persist: bool,
chroma_server_ssl_enabled: bool,
persist_directory: Optional[str] = None,
embedding: Optional[Embeddings] = None,
documents: Optional[Document] = None,
chroma_server_cors_allow_origins: Optional[str] = None,
chroma_server_host: Optional[str] = None,
chroma_server_port: Optional[int] = None,
chroma_server_grpc_port: Optional[int] = None,
) -> Union[VectorStore, BaseRetriever]:
"""
Builds the Vector Store or BaseRetriever object.
Args:
- collection_name (str): The name of the collection.
- persist_directory (Optional[str]): The directory to persist the Vector Store to.
- chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.
- persist (bool): Whether to persist the Vector Store or not.
- embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.
- documents (Optional[Document]): The documents to use for the Vector Store.
- chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.
- chroma_server_host (Optional[str]): The host for the Chroma server.
- chroma_server_port (Optional[int]): The port for the Chroma server.
- chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.
Returns:
- Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.
"""
# Chroma settings
chroma_settings = None
if chroma_server_host is not None:
chroma_settings = chromadb.config.Settings(
chroma_server_cors_allow_origins=chroma_server_cors_allow_origins
or None,
chroma_server_host=chroma_server_host,
chroma_server_port=chroma_server_port or None,
chroma_server_grpc_port=chroma_server_grpc_port or None,
chroma_server_ssl_enabled=chroma_server_ssl_enabled,
)
# If documents, then we need to create a Chroma instance using .from_documents
if documents is not None and embedding is not None:
return Chroma.from_documents(
documents=documents, # type: ignore
persist_directory=persist_directory if persist else None,
collection_name=collection_name,
embedding=embedding,
client_settings=chroma_settings,
)
return Chroma(
persist_directory=persist_directory, client_settings=chroma_settings
)
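A hedged sketch of the underlying calls the build() above performs when documents and an embedding are supplied; FakeEmbeddings is a stand-in so the example needs no API key and is not part of this component:
from langchain.embeddings.fake import FakeEmbeddings
from langchain.schema import Document
from langchain.vectorstores import Chroma

docs = [Document(page_content="hello world"), Document(page_content="goodbye world")]
store = Chroma.from_documents(
    documents=docs,
    embedding=FakeEmbeddings(size=8),
    collection_name="langflow",
    persist_directory=None,  # persist=False in the component maps to no directory
)
print(store.similarity_search("hello", k=1)[0].page_content)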

Some files were not shown because too many files have changed in this diff.