Merge remote-tracking branch 'origin/dev' into NGNMergeDev
This commit is contained in:
commit
88d91c48d8
274 changed files with 11682 additions and 3913 deletions
18
.env.example
18
.env.example
|
|
@ -45,3 +45,21 @@ LANGFLOW_OPEN_BROWSER=
|
|||
# Values: true, false
|
||||
# Example: LANGFLOW_REMOVE_API_KEYS=false
|
||||
LANGFLOW_REMOVE_API_KEYS=
|
||||
|
||||
# Whether to use RedisCache or InMemoryCache
|
||||
# Values: memory, redis
|
||||
# Example: LANGFLOW_CACHE_TYPE=memory
|
||||
# If you want to use redis then the following environment variables must be set:
|
||||
# LANGFLOW_REDIS_HOST (default: localhost)
|
||||
# LANGFLOW_REDIS_PORT (default: 6379)
|
||||
# LANGFLOW_REDIS_DB (default: 0)
|
||||
# LANGFLOW_REDIS_CACHE_EXPIRE (default: 3600)
|
||||
LANGFLOW_CACHE_TYPE=
|
||||
|
||||
# Superuser username
|
||||
# Example: LANGFLOW_SUPERUSER=admin
|
||||
LANGFLOW_SUPERUSER=
|
||||
|
||||
# Superuser password
|
||||
# Example: LANGFLOW_SUPERUSER_PASSWORD=123456
|
||||
LANGFLOW_SUPERUSER_PASSWORD=
|
||||
|
|
|
|||
44
.github/workflows/ci.yml
vendored
Normal file
44
.github/workflows/ci.yml
vendored
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
name: "Async API tests"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- dev
|
||||
pull_request:
|
||||
branches:
|
||||
- dev
|
||||
- main
|
||||
|
||||
jobs:
|
||||
build-and-test:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Cache Docker layers
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: /tmp/.buildx-cache
|
||||
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-buildx-
|
||||
|
||||
- name: Set up Docker
|
||||
run: docker --version && docker-compose --version
|
||||
|
||||
- name: "Create env file"
|
||||
working-directory: ./deploy
|
||||
run: |
|
||||
echo "${{ secrets.ENV_FILE }}" > .env
|
||||
|
||||
- name: Build and start services
|
||||
|
||||
working-directory: ./deploy
|
||||
run: docker compose up --exit-code-from tests tests result_backend broker celeryworker db --build
|
||||
continue-on-error: true
|
||||
|
||||
- name: Stop services
|
||||
run: docker compose down
|
||||
1
.github/workflows/pre-release.yml
vendored
1
.github/workflows/pre-release.yml
vendored
|
|
@ -38,6 +38,7 @@ jobs:
|
|||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
draft: false
|
||||
generateReleaseNotes: true
|
||||
prerelease: true
|
||||
tag: v${{ steps.check-version.outputs.version }}
|
||||
commit: main
|
||||
- name: Publish to PyPI
|
||||
|
|
|
|||
8
.github/workflows/release.yml
vendored
8
.github/workflows/release.yml
vendored
|
|
@ -45,11 +45,3 @@ jobs:
|
|||
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
|
||||
run: |
|
||||
poetry publish
|
||||
|
||||
- name: Trigger build and push on langchain-serve
|
||||
uses: peter-evans/repository-dispatch@v2
|
||||
with:
|
||||
token: ${{ secrets.SERVE_GITHUB_TOKEN }}
|
||||
repository: jina-ai/langchain-serve
|
||||
event-type: langflow-push
|
||||
client-payload: '{"push_token": "${{ secrets.LCSERVE_PUSH_TOKEN }}", "branch": "main"}'
|
||||
|
|
|
|||
17
.github/workflows/test-lcserve-push.yml
vendored
17
.github/workflows/test-lcserve-push.yml
vendored
|
|
@ -1,17 +0,0 @@
|
|||
name: Trigger build and push on langchain-serve
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Trigger build and push on langchain-serve
|
||||
uses: peter-evans/repository-dispatch@v2
|
||||
with:
|
||||
token: ${{ secrets.SERVE_GITHUB_TOKEN }}
|
||||
repository: jina-ai/langchain-serve
|
||||
event-type: langflow-push
|
||||
client-payload: '{"push_token": "${{ secrets.LCSERVE_PUSH_TOKEN }}", "branch": "dev"}'
|
||||
4
.github/workflows/test.yml
vendored
4
.github/workflows/test.yml
vendored
|
|
@ -7,7 +7,7 @@ on:
|
|||
branches: [dev]
|
||||
|
||||
env:
|
||||
POETRY_VERSION: "1.4.0"
|
||||
POETRY_VERSION: "1.5.0"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
|
@ -16,6 +16,8 @@ jobs:
|
|||
matrix:
|
||||
python-version:
|
||||
- "3.10"
|
||||
env:
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install poetry
|
||||
|
|
|
|||
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -254,3 +254,4 @@ langflow.db
|
|||
|
||||
/tmp/*
|
||||
src/backend/langflow/frontend/
|
||||
.docker
|
||||
22
Makefile
22
Makefile
|
|
@ -19,7 +19,8 @@ coverage:
|
|||
--cov-report term-missing:skip-covered
|
||||
|
||||
tests:
|
||||
poetry run pytest tests -n auto
|
||||
@make install_backend
|
||||
poetry run pytest tests
|
||||
|
||||
format:
|
||||
poetry run black .
|
||||
|
|
@ -41,10 +42,10 @@ run_frontend:
|
|||
cd src/frontend && npm start
|
||||
|
||||
run_cli:
|
||||
poetry run langflow --path src/frontend/build
|
||||
poetry run langflow run --path src/frontend/build
|
||||
|
||||
run_cli_debug:
|
||||
poetry run langflow --path src/frontend/build --log-level debug
|
||||
poetry run langflow run --path src/frontend/build --log-level debug
|
||||
|
||||
setup_devcontainer:
|
||||
make init
|
||||
|
|
@ -60,7 +61,7 @@ frontendc:
|
|||
make run_frontend
|
||||
|
||||
install_backend:
|
||||
poetry install
|
||||
poetry install --extras deploy
|
||||
|
||||
backend:
|
||||
make install_backend
|
||||
|
|
@ -69,7 +70,7 @@ backend:
|
|||
build_and_run:
|
||||
echo 'Removing dist folder'
|
||||
rm -rf dist
|
||||
make build && poetry run pip install dist/*.tar.gz && poetry run langflow
|
||||
make build && poetry run pip install dist/*.tar.gz && poetry run langflow run
|
||||
|
||||
build_and_install:
|
||||
echo 'Removing dist folder'
|
||||
|
|
@ -86,17 +87,6 @@ build:
|
|||
poetry build --format sdist
|
||||
rm -rf src/backend/langflow/frontend
|
||||
|
||||
lcserve_push:
|
||||
make build_frontend
|
||||
@version=$$(poetry version --short); \
|
||||
lc-serve push --app langflow.lcserve:app --app-dir . \
|
||||
--image-name langflow --image-tag $${version} --verbose --public
|
||||
|
||||
lcserve_deploy:
|
||||
@:$(if $(uses),,$(error `uses` is not set. Please run `make uses=... lcserve_deploy`))
|
||||
lc-serve deploy jcloud --app langflow.lcserve:app --app-dir . \
|
||||
--uses $(uses) --config src/backend/langflow/jcloud.yml --verbose
|
||||
|
||||
dev:
|
||||
make install_frontend
|
||||
ifeq ($(build),1)
|
||||
|
|
|
|||
118
README.md
118
README.md
|
|
@ -36,8 +36,6 @@
|
|||
- [Environment Variables](#environment-variables)
|
||||
- [Deployment](#deployment)
|
||||
- [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
|
||||
- [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
|
||||
- [API Usage](#api-usage)
|
||||
- [Deploy on Railway](#deploy-on-railway)
|
||||
- [Deploy on Render](#deploy-on-render)
|
||||
- [🎨 Creating Flows](#-creating-flows)
|
||||
|
|
@ -78,7 +76,7 @@ python -m langflow
|
|||
or
|
||||
|
||||
```shell
|
||||
langflow # or langflow --help
|
||||
langflow run # or langflow --help
|
||||
```
|
||||
|
||||
### HuggingFace Spaces
|
||||
|
|
@ -94,7 +92,7 @@ Langflow provides a command-line interface (CLI) for easy management and configu
|
|||
You can run the Langflow using the following command:
|
||||
|
||||
```shell
|
||||
langflow [OPTIONS]
|
||||
langflow run [OPTIONS]
|
||||
```
|
||||
|
||||
Each option is detailed below:
|
||||
|
|
@ -110,7 +108,6 @@ Each option is detailed below:
|
|||
- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`.
|
||||
- `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`.
|
||||
- `--cache`: Selects the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`.
|
||||
- `--jcloud/--no-jcloud`: Toggles the option to deploy on Jina AI Cloud. The default is `no-jcloud`.
|
||||
- `--dev/--no-dev`: Toggles the development mode. The default is `no-dev`.
|
||||
- `--path`: Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable.
|
||||
- `--open-browser/--no-open-browser`: Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`.
|
||||
|
|
@ -134,118 +131,9 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
|
|||
|
||||
[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
|
||||
|
||||
## Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve)
|
||||
|
||||
Langflow integrates with langchain-serve to provide a one-command deployment to Jina AI Cloud.
|
||||
|
||||
Start by installing `langchain-serve` with
|
||||
|
||||
```bash
|
||||
pip install langflow[deploy]
|
||||
# or
|
||||
pip install -U langchain-serve
|
||||
```
|
||||
|
||||
Then, run:
|
||||
|
||||
```bash
|
||||
langflow --jcloud
|
||||
```
|
||||
|
||||
```text
|
||||
🎉 Langflow server successfully deployed on Jina AI Cloud 🎉
|
||||
🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://<your-app>.wolf.jina.ai/
|
||||
📖 Read more about managing the server: https://github.com/jina-ai/langchain-serve
|
||||
```
|
||||
|
||||
<details>
|
||||
<summary>Show complete (example) output</summary>
|
||||
|
||||
```text
|
||||
🚀 Deploying Langflow server on Jina AI Cloud
|
||||
╭───────────────────────── 🎉 Flow is available! ──────────────────────────╮
|
||||
│ │
|
||||
│ ID langflow-e3dd8820ec │
|
||||
│ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai │
|
||||
│ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec │
|
||||
│ │
|
||||
╰──────────────────────────────────────────────────────────────────────────╯
|
||||
╭──────────────┬──────────────────────────────────────────────────────────────────────────────╮
|
||||
│ App ID │ langflow-e3dd8820ec │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ Phase │ Serving │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ Endpoint │ wss://langflow-e3dd8820ec.wolf.jina.ai │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ App logs │ dashboards.wolf.jina.ai │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ Swagger UI │ https://langflow-e3dd8820ec.wolf.jina.ai/docs │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ OpenAPI JSON │ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json │
|
||||
╰──────────────┴──────────────────────────────────────────────────────────────────────────────╯
|
||||
|
||||
🎉 Langflow server successfully deployed on Jina AI Cloud 🎉
|
||||
🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/
|
||||
📖 Read more about managing the server: https://github.com/jina-ai/langchain-serve
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
#### API Usage
|
||||
|
||||
You can use Langflow directly on your browser, or use the API endpoints on Jina AI Cloud to interact with the server.
|
||||
|
||||
<details>
|
||||
<summary>Show API usage (with python)</summary>
|
||||
|
||||
```python
|
||||
import requests
|
||||
|
||||
BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict"
|
||||
FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468"
|
||||
# You can tweak the flow by adding a tweaks dictionary
|
||||
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
|
||||
TWEAKS = {
|
||||
"ChatOpenAI-g4jEr": {},
|
||||
"ConversationChain-UidfJ": {}
|
||||
}
|
||||
|
||||
def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict:
|
||||
"""
|
||||
Run a flow with a given message and optional tweaks.
|
||||
|
||||
:param message: The message to send to the flow
|
||||
:param flow_id: The ID of the flow to run
|
||||
:param tweaks: Optional tweaks to customize the flow
|
||||
:return: The JSON response from the flow
|
||||
"""
|
||||
api_url = f"{BASE_API_URL}/{flow_id}"
|
||||
|
||||
payload = {"message": message}
|
||||
|
||||
if tweaks:
|
||||
payload["tweaks"] = tweaks
|
||||
|
||||
response = requests.post(api_url, json=payload)
|
||||
return response.json()
|
||||
|
||||
# Setup any tweaks you want to apply to the flow
|
||||
print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!"
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
> Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository.
|
||||
|
||||
## Deploy on Railway
|
||||
|
||||
[](https://railway.app/template/Emy2sU?referralCode=MnPSdg)
|
||||
[](https://railway.app/template/JMXEWp?referralCode=MnPSdg)
|
||||
|
||||
## Deploy on Render
|
||||
|
||||
|
|
|
|||
97
base.Dockerfile
Normal file
97
base.Dockerfile
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
|
||||
|
||||
# syntax=docker/dockerfile:1
|
||||
# Keep this syntax directive! It's used to enable Docker BuildKit
|
||||
|
||||
# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865
|
||||
# but I try to keep it updated (see history)
|
||||
|
||||
################################
|
||||
# PYTHON-BASE
|
||||
# Sets up all our shared environment variables
|
||||
################################
|
||||
FROM python:3.10-slim as python-base
|
||||
|
||||
# python
|
||||
ENV PYTHONUNBUFFERED=1 \
|
||||
# prevents python creating .pyc files
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
\
|
||||
# pip
|
||||
PIP_DISABLE_PIP_VERSION_CHECK=on \
|
||||
PIP_DEFAULT_TIMEOUT=100 \
|
||||
\
|
||||
# poetry
|
||||
# https://python-poetry.org/docs/configuration/#using-environment-variables
|
||||
POETRY_VERSION=1.5.1 \
|
||||
# make poetry install to this location
|
||||
POETRY_HOME="/opt/poetry" \
|
||||
# make poetry create the virtual environment in the project's root
|
||||
# it gets named `.venv`
|
||||
POETRY_VIRTUALENVS_IN_PROJECT=true \
|
||||
# do not ask any interactive question
|
||||
POETRY_NO_INTERACTION=1 \
|
||||
\
|
||||
# paths
|
||||
# this is where our requirements + virtual environment will live
|
||||
PYSETUP_PATH="/opt/pysetup" \
|
||||
VENV_PATH="/opt/pysetup/.venv"
|
||||
|
||||
|
||||
# prepend poetry and venv to path
|
||||
ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
|
||||
|
||||
|
||||
################################
|
||||
# BUILDER-BASE
|
||||
# Used to build deps + create our virtual environment
|
||||
################################
|
||||
FROM python-base as builder-base
|
||||
RUN apt-get update \
|
||||
&& apt-get install --no-install-recommends -y \
|
||||
# deps for installing poetry
|
||||
curl \
|
||||
# deps for building python deps
|
||||
build-essential
|
||||
|
||||
|
||||
# install poetry - respects $POETRY_VERSION & $POETRY_HOME
|
||||
# The --mount will mount the buildx cache directory to where
|
||||
# Poetry and Pip store their cache so that they can re-use it
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
curl -sSL https://install.python-poetry.org | python3 -
|
||||
|
||||
# copy project requirement files here to ensure they will be cached.
|
||||
WORKDIR $PYSETUP_PATH
|
||||
COPY poetry.lock pyproject.toml ./
|
||||
COPY ./src/backend/langflow/main.py ./src/backend/langflow/main.py
|
||||
# Copy README.md to the build context
|
||||
COPY README.md .
|
||||
# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
poetry install --without dev --extras deploy
|
||||
|
||||
|
||||
################################
|
||||
# DEVELOPMENT
|
||||
# Image used during development / testing
|
||||
################################
|
||||
FROM python-base as development
|
||||
WORKDIR $PYSETUP_PATH
|
||||
|
||||
# copy in our built poetry + venv
|
||||
COPY --from=builder-base $POETRY_HOME $POETRY_HOME
|
||||
COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH
|
||||
|
||||
# Copy just one file to avoid rebuilding the whole image
|
||||
COPY ./src/backend/langflow/__init__.py ./src/backend/langflow/__init__.py
|
||||
# quicker install as runtime deps are already installed
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
poetry install --with=dev --extras deploy
|
||||
|
||||
# copy in our app code
|
||||
COPY ./src/backend ./src/backend
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
poetry install --with=dev --extras deploy
|
||||
COPY ./tests ./tests=
|
||||
|
||||
57
deploy/.env.example
Normal file
57
deploy/.env.example
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
DOMAIN=localhost
|
||||
STACK_NAME=langflow-stack
|
||||
ENVIRONMENT=development
|
||||
|
||||
TRAEFIK_PUBLIC_NETWORK=traefik-public
|
||||
TRAEFIK_TAG=langflow-traefik
|
||||
TRAEFIK_PUBLIC_TAG=traefik-public
|
||||
|
||||
# RabbitMQ configuration
|
||||
RABBITMQ_DEFAULT_USER=langflow
|
||||
RABBITMQ_DEFAULT_PASS=langflow
|
||||
|
||||
# Database configuration
|
||||
DB_USER=langflow
|
||||
DB_PASSWORD=langflow
|
||||
DB_HOST=db
|
||||
DB_PORT=5432
|
||||
DB_NAME=langflow
|
||||
|
||||
# Logging configuration
|
||||
LOG_LEVEL=debug
|
||||
|
||||
# DB configuration
|
||||
POSTGRES_USER=langflow
|
||||
POSTGRES_PASSWORD=langflow
|
||||
POSTGRES_DB=langflow
|
||||
POSTGRES_PORT=5432
|
||||
|
||||
# Flower configuration
|
||||
LANGFLOW_CACHE_TYPE=redis
|
||||
LANGFLOW_REDIS_HOST=result_backend
|
||||
LANGFLOW_REDIS_PORT=6379
|
||||
LANGFLOW_REDIS_DB=0
|
||||
LANGFLOW_REDIS_EXPIRE=3600
|
||||
LANGFLOW_REDIS_PASSWORD=
|
||||
FLOWER_UNAUTHENTICATED_API=True
|
||||
BROKER_URL=amqp://langflow:langflow@broker:5672
|
||||
RESULT_BACKEND=redis://result_backend:6379/0
|
||||
C_FORCE_ROOT="true"
|
||||
|
||||
# Frontend configuration
|
||||
VITE_PROXY_TARGET=http://backend:7860/api/
|
||||
BACKEND_URL=http://backend:7860
|
||||
|
||||
# PGAdmin configuration
|
||||
PGADMIN_DEFAULT_EMAIL=admin@admin.com
|
||||
PGADMIN_DEFAULT_PASSWORD=admin
|
||||
|
||||
# OpenAI configuration (for testing purposes)
|
||||
OPENAI_API_KEY=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
|
||||
|
||||
# Superuser configuration
|
||||
LANGFLOW_SUPERUSER=superuser
|
||||
LANGFLOW_SUPERUSER_PASSWORD=superuser
|
||||
|
||||
# New user configuration
|
||||
LANGFLOW_NEW_USER_IS_ACTIVE=False
|
||||
1
deploy/.gitignore
vendored
Normal file
1
deploy/.gitignore
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
pgadmin
|
||||
92
deploy/base.Dockerfile
Normal file
92
deploy/base.Dockerfile
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
|
||||
|
||||
# syntax=docker/dockerfile:1
|
||||
# Keep this syntax directive! It's used to enable Docker BuildKit
|
||||
|
||||
# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865
|
||||
# but I try to keep it updated (see history)
|
||||
|
||||
################################
|
||||
# PYTHON-BASE
|
||||
# Sets up all our shared environment variables
|
||||
################################
|
||||
FROM python:3.10-slim as python-base
|
||||
|
||||
# python
|
||||
ENV PYTHONUNBUFFERED=1 \
|
||||
# prevents python creating .pyc files
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
\
|
||||
# pip
|
||||
PIP_DISABLE_PIP_VERSION_CHECK=on \
|
||||
PIP_DEFAULT_TIMEOUT=100 \
|
||||
\
|
||||
# poetry
|
||||
# https://python-poetry.org/docs/configuration/#using-environment-variables
|
||||
POETRY_VERSION=1.5.1 \
|
||||
# make poetry install to this location
|
||||
POETRY_HOME="/opt/poetry" \
|
||||
# make poetry create the virtual environment in the project's root
|
||||
# it gets named `.venv`
|
||||
POETRY_VIRTUALENVS_IN_PROJECT=true \
|
||||
# do not ask any interactive question
|
||||
POETRY_NO_INTERACTION=1 \
|
||||
\
|
||||
# paths
|
||||
# this is where our requirements + virtual environment will live
|
||||
PYSETUP_PATH="/opt/pysetup" \
|
||||
VENV_PATH="/opt/pysetup/.venv"
|
||||
|
||||
|
||||
# prepend poetry and venv to path
|
||||
ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
|
||||
|
||||
|
||||
################################
|
||||
# BUILDER-BASE
|
||||
# Used to build deps + create our virtual environment
|
||||
################################
|
||||
FROM python-base as builder-base
|
||||
RUN apt-get update \
|
||||
&& apt-get install --no-install-recommends -y \
|
||||
# deps for installing poetry
|
||||
curl \
|
||||
# deps for building python deps
|
||||
build-essential
|
||||
|
||||
# install poetry - respects $POETRY_VERSION & $POETRY_HOME
|
||||
# The --mount will mount the buildx cache directory to where
|
||||
# Poetry and Pip store their cache so that they can re-use it
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
curl -sSL https://install.python-poetry.org | python3 -
|
||||
|
||||
# copy project requirement files here to ensure they will be cached.
|
||||
WORKDIR $PYSETUP_PATH
|
||||
COPY ./poetry.lock ./pyproject.toml ./
|
||||
# Copy README.md to the build context
|
||||
COPY ./README.md ./
|
||||
# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
poetry install --without dev --extras deploy
|
||||
|
||||
|
||||
################################
|
||||
# DEVELOPMENT
|
||||
# Image used during development / testing
|
||||
################################
|
||||
FROM python-base as development
|
||||
WORKDIR $PYSETUP_PATH
|
||||
|
||||
# copy in our built poetry + venv
|
||||
COPY --from=builder-base $POETRY_HOME $POETRY_HOME
|
||||
COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH
|
||||
|
||||
# Copy just one file to avoid rebuilding the whole image
|
||||
COPY ./src/backend/langflow/__init__.py ./src/backend/langflow/__init__.py
|
||||
# quicker install as runtime deps are already installed
|
||||
RUN --mount=type=cache,target=/root/.cache \
|
||||
poetry install --with=dev --extras deploy
|
||||
|
||||
# copy in our app code
|
||||
COPY ./src/backend ./src/backend
|
||||
COPY ./tests ./tests
|
||||
67
deploy/docker-compose.override.yml
Normal file
67
deploy/docker-compose.override.yml
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
version: "3.8"
|
||||
|
||||
services:
|
||||
proxy:
|
||||
ports:
|
||||
- "80:80"
|
||||
- "8090:8080"
|
||||
command:
|
||||
# Enable Docker in Traefik, so that it reads labels from Docker services
|
||||
- --providers.docker
|
||||
# Add a constraint to only use services with the label for this stack
|
||||
# from the env var TRAEFIK_TAG
|
||||
- --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`)
|
||||
# Do not expose all Docker services, only the ones explicitly exposed
|
||||
- --providers.docker.exposedbydefault=false
|
||||
# Disable Docker Swarm mode for local development
|
||||
# - --providers.docker.swarmmode
|
||||
# Enable the access log, with HTTP requests
|
||||
- --accesslog
|
||||
# Enable the Traefik log, for configurations and errors
|
||||
- --log
|
||||
# Enable the Dashboard and API
|
||||
- --api
|
||||
# Enable the Dashboard and API in insecure mode for local development
|
||||
- --api.insecure=true
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-traefik-public-http.rule=Host(`${DOMAIN?Variable not set}`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-traefik-public.loadbalancer.server.port=80
|
||||
|
||||
result_backend:
|
||||
ports:
|
||||
- "6379:6379"
|
||||
|
||||
pgadmin:
|
||||
ports:
|
||||
- "5050:5050"
|
||||
|
||||
flower:
|
||||
ports:
|
||||
- "5555:5555"
|
||||
|
||||
backend:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860
|
||||
|
||||
frontend:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80
|
||||
|
||||
celeryworker:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-celeryworker-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-celeryworker.loadbalancer.server.port=7860
|
||||
|
||||
networks:
|
||||
traefik-public:
|
||||
# For local dev, don't expect an external Traefik network
|
||||
external: false
|
||||
277
deploy/docker-compose.with_tests.yml
Normal file
277
deploy/docker-compose.with_tests.yml
Normal file
|
|
@ -0,0 +1,277 @@
|
|||
version: "3.8"
|
||||
|
||||
services:
|
||||
proxy:
|
||||
image: traefik:v3.0
|
||||
env_file:
|
||||
- .env
|
||||
networks:
|
||||
- ${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
- default
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
command:
|
||||
# Enable Docker in Traefik, so that it reads labels from Docker services
|
||||
- --providers.docker
|
||||
# Add a constraint to only use services with the label for this stack
|
||||
# from the env var TRAEFIK_TAG
|
||||
- --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`)
|
||||
# Do not expose all Docker services, only the ones explicitly exposed
|
||||
- --providers.docker.exposedbydefault=false
|
||||
# Enable the access log, with HTTP requests
|
||||
- --accesslog
|
||||
# Enable the Traefik log, for configurations and errors
|
||||
- --log
|
||||
# Enable the Dashboard and API
|
||||
- --api
|
||||
deploy:
|
||||
placement:
|
||||
constraints:
|
||||
- node.role == manager
|
||||
labels:
|
||||
# Enable Traefik for this service, to make it available in the public network
|
||||
- traefik.enable=true
|
||||
# Use the traefik-public network (declared below)
|
||||
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
# Use the custom label "traefik.constraint-label=traefik-public"
|
||||
# This public Traefik will only use services with this label
|
||||
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
|
||||
# traefik-http set up only to use the middleware to redirect to https
|
||||
- traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.scheme=https
|
||||
- traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.permanent=true
|
||||
# Handle host with and without "www" to redirect to only one of them
|
||||
# Uses environment variable DOMAIN
|
||||
# To disable www redirection remove the Host() you want to discard, here and
|
||||
# below for HTTPS
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.entrypoints=http
|
||||
# traefik-https the actual router using HTTPS
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.entrypoints=https
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls=true
|
||||
# Use the "le" (Let's Encrypt) resolver created below
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls.certresolver=le
|
||||
# Define the port inside of the Docker service to use
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-proxy.loadbalancer.server.port=80
|
||||
# Handle domain with and without "www" to redirect to only one
|
||||
# To disable www redirection remove the next line
|
||||
- traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.regex=^https?://(www.)?(${DOMAIN?Variable not set})/(.*)
|
||||
# Redirect a domain with www to non-www
|
||||
# To disable it remove the next line
|
||||
- traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.replacement=https://${DOMAIN?Variable not set}/$${3}
|
||||
# Redirect a domain without www to www
|
||||
# To enable it remove the previous line and uncomment the next
|
||||
# - traefik.http.middlewares.${STACK_NAME}-www-redirect.redirectregex.replacement=https://www.${DOMAIN}/$${3}
|
||||
# Middleware to redirect www, to disable it remove the next line
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.middlewares=${STACK_NAME?Variable not set}-www-redirect
|
||||
# Middleware to redirect www, and redirect HTTP to HTTPS
|
||||
# to disable www redirection remove the section: ${STACK_NAME?Variable not set}-www-redirect,
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.middlewares=${STACK_NAME?Variable not set}-www-redirect,${STACK_NAME?Variable not set}-https-redirect
|
||||
|
||||
backend: &backend
|
||||
image: "ogabrielluiz/langflow:latest"
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: base.Dockerfile
|
||||
depends_on:
|
||||
- db
|
||||
- broker
|
||||
- result_backend
|
||||
env_file:
|
||||
- .env
|
||||
volumes:
|
||||
- ../:/app
|
||||
- ./startup-backend.sh:/startup-backend.sh # Ensure the paths match
|
||||
command: /startup-backend.sh # Fixed the path
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860
|
||||
|
||||
db:
|
||||
image: postgres:15.4
|
||||
volumes:
|
||||
- app-db-data:/var/lib/postgresql/data/pgdata
|
||||
environment:
|
||||
- PGDATA=/var/lib/postgresql/data/pgdata
|
||||
deploy:
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.app-db-data == true
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
env_file:
|
||||
- .env
|
||||
|
||||
pgadmin:
|
||||
image: dpage/pgadmin4
|
||||
networks:
|
||||
- ${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
- default
|
||||
volumes:
|
||||
- pgadmin-data:/var/lib/pgadmin
|
||||
env_file:
|
||||
- .env
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.rule=Host(`pgadmin.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.entrypoints=http
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.middlewares=${STACK_NAME?Variable not set}-https-redirect
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.rule=Host(`pgadmin.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.entrypoints=https
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls=true
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls.certresolver=le
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-pgadmin.loadbalancer.server.port=5050
|
||||
|
||||
result_backend:
|
||||
image: redis:6.2.5
|
||||
env_file:
|
||||
- .env
|
||||
# ports:
|
||||
# - 6379:6379
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
|
||||
celeryworker:
|
||||
<<: *backend
|
||||
env_file:
|
||||
- .env
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: base.Dockerfile
|
||||
command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
flower:
|
||||
<<: *backend
|
||||
env_file:
|
||||
- .env
|
||||
networks:
|
||||
- default
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: base.Dockerfile
|
||||
environment:
|
||||
- FLOWER_PORT=5555
|
||||
|
||||
command: /bin/sh -c "celery -A langflow.worker.celery_app --broker=${BROKER_URL?Variable not set} flower --port=5555"
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.rule=Host(`flower.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.entrypoints=http
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.middlewares=${STACK_NAME?Variable not set}-https-redirect
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.rule=Host(`flower.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.entrypoints=https
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls=true
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls.certresolver=le
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-flower.loadbalancer.server.port=5555
|
||||
|
||||
frontend:
|
||||
image: "ogabrielluiz/langflow_frontend:latest"
|
||||
env_file:
|
||||
- .env
|
||||
# user: your-non-root-user
|
||||
build:
|
||||
context: ../src/frontend
|
||||
dockerfile: Dockerfile
|
||||
args:
|
||||
- BACKEND_URL=http://backend:7860
|
||||
restart: on-failure
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80
|
||||
|
||||
broker:
|
||||
# RabbitMQ management console
|
||||
image: rabbitmq:3-management
|
||||
environment:
|
||||
- RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER:-admin}
|
||||
- RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS:-admin}
|
||||
volumes:
|
||||
- rabbitmq_data:/etc/rabbitmq/
|
||||
- rabbitmq_data:/var/lib/rabbitmq/
|
||||
- rabbitmq_log:/var/log/rabbitmq/
|
||||
ports:
|
||||
- 5672:5672
|
||||
- 15672:15672
|
||||
|
||||
prometheus:
|
||||
image: prom/prometheus:v2.37.9
|
||||
env_file:
|
||||
- .env
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
command:
|
||||
- "--config.file=/etc/prometheus/prometheus.yml"
|
||||
# ports:
|
||||
# - 9090:9090
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-prometheus-http.rule=PathPrefix(`/metrics`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-prometheus.loadbalancer.server.port=9090
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana:8.2.6
|
||||
env_file:
|
||||
- .env
|
||||
# ports:
|
||||
# - 3000:3000
|
||||
volumes:
|
||||
- grafana_data:/var/lib/grafana
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-grafana-http.rule=PathPrefix(`/grafana`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-grafana.loadbalancer.server.port=3000
|
||||
|
||||
tests:
|
||||
extends:
|
||||
file: docker-compose.yml
|
||||
service: backend
|
||||
env_file:
|
||||
- .env
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: base.Dockerfile
|
||||
command: pytest -vv
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
# override deploy labels to avoid conflicts with the backend service
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-tests-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-tests.loadbalancer.server.port=7861
|
||||
|
||||
volumes:
|
||||
grafana_data:
|
||||
app-db-data:
|
||||
rabbitmq_data:
|
||||
rabbitmq_log:
|
||||
pgadmin-data:
|
||||
|
||||
networks:
|
||||
traefik-public:
|
||||
# Allow setting it to false for testing
|
||||
external: false # ${TRAEFIK_PUBLIC_NETWORK_IS_EXTERNAL-true}
|
||||
258
deploy/docker-compose.yml
Normal file
258
deploy/docker-compose.yml
Normal file
|
|
@ -0,0 +1,258 @@
|
|||
version: "3.8"
|
||||
|
||||
services:
|
||||
proxy:
|
||||
image: traefik:v3.0
|
||||
env_file:
|
||||
- .env
|
||||
networks:
|
||||
- ${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
- default
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
command:
|
||||
# Enable Docker in Traefik, so that it reads labels from Docker services
|
||||
- --providers.docker
|
||||
# Add a constraint to only use services with the label for this stack
|
||||
# from the env var TRAEFIK_TAG
|
||||
- --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`)
|
||||
# Do not expose all Docker services, only the ones explicitly exposed
|
||||
- --providers.docker.exposedbydefault=false
|
||||
# Enable the access log, with HTTP requests
|
||||
- --accesslog
|
||||
# Enable the Traefik log, for configurations and errors
|
||||
- --log
|
||||
# Enable the Dashboard and API
|
||||
- --api
|
||||
deploy:
|
||||
placement:
|
||||
constraints:
|
||||
- node.role == manager
|
||||
labels:
|
||||
# Enable Traefik for this service, to make it available in the public network
|
||||
- traefik.enable=true
|
||||
# Use the traefik-public network (declared below)
|
||||
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
# Use the custom label "traefik.constraint-label=traefik-public"
|
||||
# This public Traefik will only use services with this label
|
||||
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
|
||||
# traefik-http set up only to use the middleware to redirect to https
|
||||
- traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.scheme=https
|
||||
- traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.permanent=true
|
||||
# Handle host with and without "www" to redirect to only one of them
|
||||
# Uses environment variable DOMAIN
|
||||
# To disable www redirection remove the Host() you want to discard, here and
|
||||
# below for HTTPS
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.entrypoints=http
|
||||
# traefik-https the actual router using HTTPS
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.entrypoints=https
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls=true
|
||||
# Use the "le" (Let's Encrypt) resolver created below
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls.certresolver=le
|
||||
# Define the port inside of the Docker service to use
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-proxy.loadbalancer.server.port=80
|
||||
# Handle domain with and without "www" to redirect to only one
|
||||
# To disable www redirection remove the next line
|
||||
- traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.regex=^https?://(www.)?(${DOMAIN?Variable not set})/(.*)
|
||||
# Redirect a domain with www to non-www
|
||||
# To disable it remove the next line
|
||||
- traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.replacement=https://${DOMAIN?Variable not set}/$${3}
|
||||
# Redirect a domain without www to www
|
||||
# To enable it remove the previous line and uncomment the next
|
||||
# - traefik.http.middlewares.${STACK_NAME}-www-redirect.redirectregex.replacement=https://www.${DOMAIN}/$${3}
|
||||
# Middleware to redirect www, to disable it remove the next line
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.middlewares=${STACK_NAME?Variable not set}-www-redirect
|
||||
# Middleware to redirect www, and redirect HTTP to HTTPS
|
||||
# to disable www redirection remove the section: ${STACK_NAME?Variable not set}-www-redirect,
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.middlewares=${STACK_NAME?Variable not set}-www-redirect,${STACK_NAME?Variable not set}-https-redirect
|
||||
|
||||
backend: &backend
|
||||
image: "ogabrielluiz/langflow:latest"
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: base.Dockerfile
|
||||
depends_on:
|
||||
- db
|
||||
- broker
|
||||
- result_backend
|
||||
env_file:
|
||||
- .env
|
||||
volumes:
|
||||
- ../:/app
|
||||
- ./startup-backend.sh:/startup-backend.sh # Ensure the paths match
|
||||
command: /startup-backend.sh # Fixed the path
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860
|
||||
|
||||
db:
|
||||
image: postgres:15.4
|
||||
volumes:
|
||||
- app-db-data:/var/lib/postgresql/data/pgdata
|
||||
environment:
|
||||
- PGDATA=/var/lib/postgresql/data/pgdata
|
||||
deploy:
|
||||
placement:
|
||||
constraints:
|
||||
- node.labels.app-db-data == true
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
env_file:
|
||||
- .env
|
||||
|
||||
pgadmin:
|
||||
image: dpage/pgadmin4
|
||||
networks:
|
||||
- ${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
- default
|
||||
volumes:
|
||||
- pgadmin-data:/var/lib/pgadmin
|
||||
env_file:
|
||||
- .env
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.rule=Host(`pgadmin.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.entrypoints=http
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.middlewares=${STACK_NAME?Variable not set}-https-redirect
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.rule=Host(`pgadmin.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.entrypoints=https
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls=true
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls.certresolver=le
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-pgadmin.loadbalancer.server.port=5050
|
||||
|
||||
result_backend:
|
||||
image: redis:6.2.5
|
||||
env_file:
|
||||
- .env
|
||||
# ports:
|
||||
# - 6379:6379
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
|
||||
celeryworker:
|
||||
<<: *backend
|
||||
env_file:
|
||||
- .env
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: base.Dockerfile
|
||||
command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
flower:
|
||||
<<: *backend
|
||||
env_file:
|
||||
- .env
|
||||
networks:
|
||||
- default
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: base.Dockerfile
|
||||
environment:
|
||||
- FLOWER_PORT=5555
|
||||
|
||||
command: /bin/sh -c "celery -A langflow.worker.celery_app --broker=${BROKER_URL?Variable not set} flower --port=5555"
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set}
|
||||
- traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.rule=Host(`flower.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.entrypoints=http
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.middlewares=${STACK_NAME?Variable not set}-https-redirect
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.rule=Host(`flower.${DOMAIN?Variable not set}`)
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.entrypoints=https
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls=true
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls.certresolver=le
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-flower.loadbalancer.server.port=5555
|
||||
|
||||
frontend:
|
||||
image: "ogabrielluiz/langflow_frontend:latest"
|
||||
env_file:
|
||||
- .env
|
||||
# user: your-non-root-user
|
||||
build:
|
||||
context: ../src/frontend
|
||||
dockerfile: Dockerfile
|
||||
args:
|
||||
- BACKEND_URL=http://backend:7860
|
||||
restart: on-failure
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80
|
||||
|
||||
broker:
|
||||
# RabbitMQ management console
|
||||
image: rabbitmq:3-management
|
||||
environment:
|
||||
- RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER:-admin}
|
||||
- RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS:-admin}
|
||||
volumes:
|
||||
- rabbitmq_data:/etc/rabbitmq/
|
||||
- rabbitmq_data:/var/lib/rabbitmq/
|
||||
- rabbitmq_log:/var/log/rabbitmq/
|
||||
ports:
|
||||
- 5672:5672
|
||||
- 15672:15672
|
||||
|
||||
prometheus:
|
||||
image: prom/prometheus:v2.37.9
|
||||
env_file:
|
||||
- .env
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
command:
|
||||
- "--config.file=/etc/prometheus/prometheus.yml"
|
||||
# ports:
|
||||
# - 9090:9090
|
||||
healthcheck:
|
||||
test: "exit 0"
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-prometheus-http.rule=PathPrefix(`/metrics`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-prometheus.loadbalancer.server.port=9090
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana:8.2.6
|
||||
env_file:
|
||||
- .env
|
||||
# ports:
|
||||
# - 3000:3000
|
||||
volumes:
|
||||
- grafana_data:/var/lib/grafana
|
||||
deploy:
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set}
|
||||
- traefik.http.routers.${STACK_NAME?Variable not set}-grafana-http.rule=PathPrefix(`/grafana`)
|
||||
- traefik.http.services.${STACK_NAME?Variable not set}-grafana.loadbalancer.server.port=3000
|
||||
|
||||
volumes:
|
||||
grafana_data:
|
||||
app-db-data:
|
||||
rabbitmq_data:
|
||||
rabbitmq_log:
|
||||
pgadmin-data:
|
||||
|
||||
networks:
|
||||
traefik-public:
|
||||
# Allow setting it to false for testing
|
||||
external: false # ${TRAEFIK_PUBLIC_NETWORK_IS_EXTERNAL-true}
|
||||
11
deploy/prometheus.yml
Normal file
11
deploy/prometheus.yml
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
global:
|
||||
scrape_interval: 15s
|
||||
evaluation_interval: 15s
|
||||
|
||||
scrape_configs:
|
||||
- job_name: prometheus
|
||||
static_configs:
|
||||
- targets: ["prometheus:9090"]
|
||||
- job_name: flower
|
||||
static_configs:
|
||||
- targets: ["flower:5555"]
|
||||
17
deploy/startup-backend.sh
Executable file
17
deploy/startup-backend.sh
Executable file
|
|
@ -0,0 +1,17 @@
|
|||
#!/bin/bash
|
||||
|
||||
export LANGFLOW_DATABASE_URL="postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}"
|
||||
|
||||
|
||||
# Your command to start the backend
|
||||
|
||||
# If the ENVIRONMENT variable is set to "development", then start the backend in development mode
|
||||
# else start the backend in production mode with guvicorn
|
||||
if [ "$ENVIRONMENT" = "development" ]; then
|
||||
echo "Starting backend in development mode"
|
||||
exec python -m uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --log-level ${LOG_LEVEL:-info} --workers 2 --reload
|
||||
else
|
||||
echo "Starting backend in production mode"
|
||||
exec langflow run --host 0.0.0.0 --port 7860 --log-level ${LOG_LEVEL:-info} --workers -1 --backend-only
|
||||
fi
|
||||
|
||||
|
|
@ -33,6 +33,7 @@ The CustomComponent class serves as the foundation for creating custom component
|
|||
| Supported Types |
|
||||
| --------------------------------------------------------- |
|
||||
| _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ |
|
||||
| _`langflow.field_typing.NestedDict`_ |
|
||||
| _`langchain.chains.base.Chain`_ |
|
||||
| _`langchain.PromptTemplate`_ |
|
||||
| _`langchain.llms.base.BaseLLM`_ |
|
||||
|
|
@ -44,6 +45,8 @@ The CustomComponent class serves as the foundation for creating custom component
|
|||
| _`langchain.embeddings.base.Embeddings`_ |
|
||||
| _`langchain.schema.BaseRetriever`_ |
|
||||
|
||||
The difference between _`dict`_ and _`langflow.field_typing.NestedDict`_ is that one adds a simple key-value pair field, while the other opens a more robust dictionary editor.
|
||||
|
||||
<Admonition type="info">
|
||||
Unlike Langchain types, base Python types do not add a
|
||||
[handle](../guidelines/components) to the field by default. To add handles,
|
||||
|
|
|
|||
|
|
@ -1,11 +1,13 @@
|
|||
import Admonition from '@theme/Admonition';
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Text Splitters
|
||||
|
||||
<Admonition type="caution" icon="🚧" title="ZONE UNDER CONSTRUCTION">
|
||||
<p>
|
||||
We appreciate your understanding as we polish our documentation – it may contain some rough edges. Share your feedback or report issues to help us improve! 🛠️📝
|
||||
</p>
|
||||
<p>
|
||||
We appreciate your understanding as we polish our documentation – it may
|
||||
contain some rough edges. Share your feedback or report issues to help us
|
||||
improve! 🛠️📝
|
||||
</p>
|
||||
</Admonition>
|
||||
|
||||
A text splitter is a tool that divides a document or text into smaller chunks or segments. It is used to break down large texts into more manageable pieces for analysis or processing.
|
||||
|
|
@ -22,13 +24,13 @@ The `CharacterTextSplitter` is used to split a long text into smaller chunks bas
|
|||
|
||||
- **chunk_overlap:** Determines the number of characters that overlap between consecutive chunks when splitting text. It specifies how much of the previous chunk should be included in the next chunk.
|
||||
|
||||
For example, if the `chunk_overlap` is set to 20 and the `chunk_size` is set to 100, the splitter will create chunks of 100 characters each, but the last 20 characters of each chunk will overlap with the first 20 characters of the next chunk. This allows for a smoother transition between chunks and ensures that no information is lost – defaults to `200`.
|
||||
For example, if the `chunk_overlap` is set to 20 and the `chunk_size` is set to 100, the splitter will create chunks of 100 characters each, but the last 20 characters of each chunk will overlap with the first 20 characters of the next chunk. This allows for a smoother transition between chunks and ensures that no information is lost – defaults to `200`.
|
||||
|
||||
- **chunk_size:** Determines the maximum number of characters in each chunk when splitting a text. It specifies the size or length of each chunk.
|
||||
|
||||
For example, if the chunk_size is set to 100, the splitter will create chunks of 100 characters each. If the text is longer than 100 characters, it will be divided into multiple chunks of equal size, except for the last chunk, which may be smaller if there are remaining characters –defaults to `1000`.
|
||||
For example, if the chunk_size is set to 100, the splitter will create chunks of 100 characters each. If the text is longer than 100 characters, it will be divided into multiple chunks of equal size, except for the last chunk, which may be smaller if there are remaining characters –defaults to `1000`.
|
||||
|
||||
- **separator:** Specifies the character that will be used to split the text into chunks – defaults to `.`
|
||||
- **separator:** Specifies the character that will be used to split the text into chunks – defaults to `.`
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -44,6 +46,18 @@ The `RecursiveCharacterTextSplitter` splits the text by trying to keep paragra
|
|||
|
||||
- **chunk_size:** Determines the maximum number of characters in each chunk when splitting a text. It specifies the size or length of each chunk.
|
||||
|
||||
- **separator_type:** The parameter allows the user to split the code with multiple language support. It supports various languages such as Text, Ruby, Python, Solidity, Java, and more. Defaults to `Text`.
|
||||
- **separators:** The `separators` in RecursiveCharacterTextSplitter are the characters used to split the text into chunks. The text splitter tries to create chunks based on splitting on the first character in the list of `separators`. If any chunks are too large, it moves on to the next character in the list and continues splitting. Defaults to ["\n\n", "\n", " ", ""].
|
||||
|
||||
- **separators:** The `separators` in RecursiveCharacterTextSplitter are the characters used to split the text into chunks. The text splitter tries to create chunks based on splitting on the first character in the list of `separators`. If any chunks are too large, it moves on to the next character in the list and continues splitting. Defaults to `.`
|
||||
### LanguageRecursiveTextSplitter
|
||||
|
||||
The `LanguageRecursiveTextSplitter` is a text splitter that splits the text into smaller chunks based on the (programming) language of the text.
|
||||
|
||||
**Params**
|
||||
|
||||
- **Documents:** Input documents to split.
|
||||
|
||||
- **chunk_overlap:** Determines the number of characters that overlap between consecutive chunks when splitting text. It specifies how much of the previous chunk should be included in the next chunk.
|
||||
|
||||
- **chunk_size:** Determines the maximum number of characters in each chunk when splitting a text. It specifies the size or length of each chunk.
|
||||
|
||||
- **separator_type:** The parameter allows the user to split the code with multiple language support. It supports various languages such as Ruby, Python, Solidity, Java, and more. Defaults to `Python`.
|
||||
|
|
|
|||
|
|
@ -1,101 +0,0 @@
|
|||
# Deploy on Jina AI Cloud
|
||||
|
||||
Langflow integrates with langchain-serve to provide a one-command deployment to [Jina AI Cloud](https://github.com/jina-ai/langchain-serve).
|
||||
|
||||
Start by installing `langchain-serve` with
|
||||
|
||||
```bash
|
||||
pip install -U langchain-serve
|
||||
```
|
||||
|
||||
Then, run:
|
||||
|
||||
```bash
|
||||
langflow --jcloud
|
||||
```
|
||||
|
||||
```text
|
||||
🎉 Langflow server successfully deployed on Jina AI Cloud 🎉
|
||||
🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://<your-app>.wolf.jina.ai/
|
||||
📖 Read more about managing the server: https://github.com/jina-ai/langchain-serve
|
||||
```
|
||||
|
||||
**Complete (example) output:**
|
||||
|
||||
```text
|
||||
🚀 Deploying Langflow server on Jina AI Cloud
|
||||
╭───────────────────────── 🎉 Flow is available! ──────────────────────────╮
|
||||
│ │
|
||||
│ ID langflow-e3dd8820ec │
|
||||
│ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai │
|
||||
│ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec │
|
||||
│ │
|
||||
╰──────────────────────────────────────────────────────────────────────────╯
|
||||
╭──────────────┬──────────────────────────────────────────────────────────────────────────────╮
|
||||
│ App ID │ langflow-e3dd8820ec │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ Phase │ Serving │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ Endpoint │ wss://langflow-e3dd8820ec.wolf.jina.ai │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ App logs │ dashboards.wolf.jina.ai │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ Swagger UI │ https://langflow-e3dd8820ec.wolf.jina.ai/docs │
|
||||
├──────────────┼──────────────────────────────────────────────────────────────────────────────┤
|
||||
│ OpenAPI JSON │ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json │
|
||||
╰──────────────┴──────────────────────────────────────────────────────────────────────────────╯
|
||||
|
||||
🎉 Langflow server successfully deployed on Jina AI Cloud 🎉
|
||||
🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/
|
||||
📖 Read more about managing the server: https://github.com/jina-ai/langchain-serve
|
||||
```
|
||||
## API Usage (with python)
|
||||
|
||||
You can use Langflow directly on your browser or the API endpoints on Jina AI Cloud to interact with the server.
|
||||
|
||||
```python
|
||||
import requests
|
||||
|
||||
BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict"
|
||||
FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468"
|
||||
# You can tweak the flow by adding a tweaks dictionary
|
||||
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
|
||||
TWEAKS = {
|
||||
"ChatOpenAI-g4jEr": {},
|
||||
"ConversationChain-UidfJ": {}
|
||||
}
|
||||
|
||||
def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict:
|
||||
"""
|
||||
Run a flow with a given message and optional tweaks.
|
||||
|
||||
:param message: The message to send to the flow
|
||||
:param flow_id: The ID of the flow to run
|
||||
:param tweaks: Optional tweaks to customize the flow
|
||||
:return: The JSON response from the flow
|
||||
"""
|
||||
api_url = f"{BASE_API_URL}/{flow_id}"
|
||||
|
||||
payload = {"message": message}
|
||||
|
||||
if tweaks:
|
||||
payload["tweaks"] = tweaks
|
||||
|
||||
response = requests.post(api_url, json=payload)
|
||||
return response.json()
|
||||
|
||||
# Setup any tweaks you want to apply to the flow
|
||||
print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!"
|
||||
}
|
||||
```
|
||||
|
||||
:::info
|
||||
|
||||
Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository.
|
||||
|
||||
:::
|
||||
147
docs/docs/guidelines/api.mdx
Normal file
147
docs/docs/guidelines/api.mdx
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
|
||||
# API Keys
|
||||
|
||||
## Introduction
|
||||
|
||||
Langflow offers an API Key functionality that allows users to access their individual components and flows without going through traditional login authentication. The API Key is a user-specific token that can be included in the request's header or query parameter to authenticate API calls. The following documentation outlines how to generate, use, and manage these API Keys in Langflow.
|
||||
|
||||
## Generating an API Key
|
||||
|
||||
### Through Langflow UI
|
||||
|
||||
{/* add image img/api-key.png */}
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: useBaseUrl("img/api-key.png"),
|
||||
}}
|
||||
style={{ width: "50%", maxWidth: "600px", margin: "0 auto" }}
|
||||
/>
|
||||
|
||||
1. Click on the "API Key" icon.
|
||||
2. Click on "Create new secret key".
|
||||
3. Give it an optional name.
|
||||
4. Click on "Create secret key".
|
||||
5. Copy the API key and store it in a secure location.
|
||||
|
||||
## Using the API Key
|
||||
|
||||
### Using the `x-api-key` Header
|
||||
|
||||
Include the `x-api-key` in the HTTP header when making API requests:
|
||||
|
||||
```bash
|
||||
curl -X POST \
|
||||
http://localhost:3000/api/v1/process/<your_flow_id> \
|
||||
  -H 'Content-Type: application/json' \
|
||||
  -H 'x-api-key: <your api key>' \
|
||||
-d '{"inputs": {"text":""}, "tweaks": {}}'
|
||||
```
|
||||
|
||||
With Python using `requests`:
|
||||
|
||||
```python
|
||||
import requests
|
||||
from typing import Optional
|
||||
|
||||
BASE_API_URL = "http://localhost:3001/api/v1/process"
|
||||
FLOW_ID = "4441b773-0724-434e-9cee-19d995d8f2df"
|
||||
# You can tweak the flow by adding a tweaks dictionary
|
||||
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
|
||||
TWEAKS = {}
|
||||
|
||||
def run_flow(inputs: dict,
|
||||
flow_id: str,
|
||||
tweaks: Optional[dict] = None,
|
||||
apiKey: Optional[str] = None) -> dict:
|
||||
"""
|
||||
Run a flow with a given message and optional tweaks.
|
||||
|
||||
    :param inputs: The inputs to send to the flow
|
||||
:param flow_id: The ID of the flow to run
|
||||
    :param tweaks: Optional tweaks to customize the flow
    :param apiKey: Optional API key used to authenticate the request
|
||||
:return: The JSON response from the flow
|
||||
"""
|
||||
api_url = f"{BASE_API_URL}/{flow_id}"
|
||||
|
||||
payload = {"inputs": inputs}
|
||||
headers = {}
|
||||
|
||||
if tweaks:
|
||||
payload["tweaks"] = tweaks
|
||||
if apiKey:
|
||||
headers = {"x-api-key": apiKey}
|
||||
|
||||
response = requests.post(api_url, json=payload, headers=headers)
|
||||
return response.json()
|
||||
|
||||
# Setup any tweaks you want to apply to the flow
|
||||
inputs = {"text":""}
|
||||
api_key = "<your api key>"
|
||||
print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key))
|
||||
```
|
||||
|
||||
### Using the Query Parameter
|
||||
|
||||
Alternatively, you can include the API key as a query parameter in the URL:
|
||||
|
||||
```bash
|
||||
curl -X POST \
|
||||
http://localhost:3000/api/v1/process/<your_flow_id>?x-api-key=<your_api_key> \
|
||||
  -H 'Content-Type: application/json' \
|
||||
-d '{"inputs": {"text":""}, "tweaks": {}}'
|
||||
```
|
||||
|
||||
Or with Python:
|
||||
|
||||
```python
|
||||
import requests
from typing import Optional
|
||||
|
||||
BASE_API_URL = "http://localhost:3001/api/v1/process"
|
||||
FLOW_ID = "4441b773-0724-434e-9cee-19d995d8f2df"
|
||||
# You can tweak the flow by adding a tweaks dictionary
|
||||
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
|
||||
TWEAKS = {}
|
||||
|
||||
def run_flow(inputs: dict,
|
||||
flow_id: str,
|
||||
tweaks: Optional[dict] = None,
|
||||
apiKey: Optional[str] = None) -> dict:
|
||||
"""
|
||||
Run a flow with a given message and optional tweaks.
|
||||
|
||||
    :param inputs: The inputs to send to the flow
|
||||
:param flow_id: The ID of the flow to run
|
||||
    :param tweaks: Optional tweaks to customize the flow
    :param apiKey: Optional API key used to authenticate the request
|
||||
:return: The JSON response from the flow
|
||||
"""
|
||||
api_url = f"{BASE_API_URL}/{flow_id}"
|
||||
|
||||
payload = {"inputs": inputs}
|
||||
headers = {}
|
||||
|
||||
if tweaks:
|
||||
payload["tweaks"] = tweaks
|
||||
if apiKey:
|
||||
api_url += f"?x-api-key={apiKey}"
|
||||
|
||||
response = requests.post(api_url, json=payload, headers=headers)
|
||||
return response.json()
|
||||
|
||||
# Setup any tweaks you want to apply to the flow
|
||||
inputs = {"text":""}
|
||||
api_key = "<your api key>"
|
||||
print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key))
|
||||
```
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- **Visibility**: The API key won't be retrievable again through the UI for security reasons.
|
||||
- **Scope**: The key only allows access to the flows and components of the specific user to whom it was issued.
|
||||
|
||||
## Revoking an API Key
|
||||
|
||||
To revoke an API key, simply delete it from the UI. This will immediately invalidate the key and prevent it from being used again.
|
||||
73
docs/docs/guidelines/async-api.mdx
Normal file
73
docs/docs/guidelines/async-api.mdx
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Asynchronous Processing
|
||||
|
||||
## Introduction
|
||||
|
||||
Starting from version 0.5, Langflow introduces a new feature to its API: the _`sync`_ flag. This flag allows users to opt for asynchronous processing of their flows, freeing up resources and enabling better control over long-running tasks.
|
||||
This feature supports running tasks in a Celery worker queue and AnyIO task groups for now.
|
||||
|
||||
<Admonition type="warning" caption="Experimental Feature">
|
||||
This is an experimental feature. The default behavior of the API is still
|
||||
synchronous processing. The API may change in the future.
|
||||
</Admonition>
|
||||
|
||||
## The _`sync`_ Flag
|
||||
|
||||
The _`sync`_ flag can be included in the payload of your POST request to the _`/api/v1/process/<your_flow_id>`_ endpoint.
|
||||
When set to _`false`_, the API will initiate an asynchronous task instead of processing the flow synchronously.
|
||||
|
||||
### API Request with _`sync`_ flag
|
||||
|
||||
```bash
|
||||
curl -X POST \
|
||||
http://localhost:3000/api/v1/process/<your_flow_id> \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'x-api-key: <your_api_key>' \
|
||||
-d '{"inputs": {"text": ""}, "tweaks": {}, "sync": false}'
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"result": {
|
||||
"output": "..."
|
||||
},
|
||||
"task": {
|
||||
"id": "...",
|
||||
"href": "api/v1/task/<task_id>"
|
||||
},
|
||||
"session_id": "...",
|
||||
"backend": "..." // celery or anyio
|
||||
}
|
||||
```
|
||||
|
||||
## Checking Task Status
|
||||
|
||||
You can check the status of an asynchronous task by making a GET request to the `/task/{task_id}` endpoint.
|
||||
|
||||
```bash
|
||||
curl -X GET \
|
||||
http://localhost:3000/api/v1/task/<task_id> \
|
||||
-H 'x-api-key: <your_api_key>'
|
||||
```
|
||||
|
||||
### Response
|
||||
|
||||
The endpoint will return the current status of the task and, if completed, the result of the task. Possible statuses include:
|
||||
|
||||
- _`PENDING`_: The task is waiting for execution.
|
||||
- _`SUCCESS`_: The task has completed successfully.
|
||||
- _`FAILURE`_: The task has failed.
|
||||
|
||||
Example response for a completed task:
|
||||
|
||||
```json
|
||||
{
|
||||
"status": "SUCCESS",
|
||||
"result": {
|
||||
"output": "..."
|
||||
}
|
||||
}
|
||||
```
|
||||
|
|
@ -387,7 +387,7 @@ Your structure should look something like this:
|
|||
The recommended way to load custom components is to set the _`LANGFLOW_COMPONENTS_PATH`_ environment variable to the path of your custom components directory. Then, run the Langflow CLI as usual.
|
||||
|
||||
```bash
|
||||
export LANGFLOW_COMPONENTS_PATH=/path/to/components
|
||||
export LANGFLOW_COMPONENTS_PATH='["/path/to/components"]'
|
||||
langflow
|
||||
```
|
||||
|
||||
|
|
|
|||
128
docs/docs/guidelines/login.mdx
Normal file
128
docs/docs/guidelines/login.mdx
Normal file
|
|
@ -0,0 +1,128 @@
|
|||
import ThemedImage from "@theme/ThemedImage";
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import ReactPlayer from "react-player";
|
||||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Sign up and Sign in
|
||||
|
||||
## Introduction
|
||||
|
||||
The login functionality in Langflow serves to authenticate users and protect sensitive routes in the application. Starting from version 0.5, Langflow introduces an enhanced login mechanism that is governed by a few environment variables. This allows new secure features.
|
||||
|
||||
## Environment Variables
|
||||
|
||||
The following environment variables are crucial in configuring the login settings:
|
||||
|
||||
- _`LANGFLOW_AUTO_LOGIN`_: Determines whether Langflow should automatically log users in. Default is `True`.
|
||||
- _`LANGFLOW_SUPERUSER`_: The username of the superuser.
|
||||
- _`LANGFLOW_SUPERUSER_PASSWORD`_: The password for the superuser.
|
||||
- _`LANGFLOW_SECRET_KEY`_: A key used for encrypting the superuser's password.
|
||||
- _`LANGFLOW_NEW_USER_IS_ACTIVE`_: Determines whether new users are automatically activated. Default is `False`.
|
||||
|
||||
All of these variables can be passed to the CLI command _`langflow run`_ through the _`--env-file`_ option. For example:
|
||||
|
||||
```bash
|
||||
langflow run --env-file .env
|
||||
```
|
||||
|
||||
<Admonition type="info">
|
||||
It is critical not to expose these environment variables in your code
|
||||
repository. Always set them securely in your deployment environment, for
|
||||
example, using Docker secrets, Kubernetes ConfigMaps/Secrets, or dedicated
|
||||
secure environment configuration systems like AWS Secrets Manager.
|
||||
</Admonition>
|
||||
|
||||
### _`LANGFLOW_AUTO_LOGIN`_
|
||||
|
||||
By default, this variable is set to `True`. When enabled (`True`), Langflow operates as it did in versions prior to 0.5—automatic login without requiring explicit user authentication.
|
||||
|
||||
To disable automatic login and enforce user authentication:
|
||||
|
||||
```bash
|
||||
export LANGFLOW_AUTO_LOGIN=False
|
||||
```
|
||||
|
||||
### _`LANGFLOW_SUPERUSER`_ and _`LANGFLOW_SUPERUSER_PASSWORD`_
|
||||
|
||||
These environment variables are only relevant when `LANGFLOW_AUTO_LOGIN` is set to `False`. They specify the username and password for the superuser, which is essential for administrative tasks.
|
||||
|
||||
To create a superuser manually:
|
||||
|
||||
```bash
|
||||
export LANGFLOW_SUPERUSER=admin
|
||||
export LANGFLOW_SUPERUSER_PASSWORD=securepassword
|
||||
```
|
||||
|
||||
You can also use the CLI command `langflow superuser` to set up a superuser interactively.
|
||||
|
||||
### _`LANGFLOW_SECRET_KEY`_
|
||||
|
||||
This environment variable holds a secret key used for encrypting the superuser's password. Make sure to set this to a secure, randomly generated string.
|
||||
|
||||
```bash
|
||||
export LANGFLOW_SECRET_KEY=randomly_generated_secure_key
|
||||
```
|
||||
|
||||
### _`LANGFLOW_NEW_USER_IS_ACTIVE`_
|
||||
|
||||
By default, this variable is set to `False`. When enabled (`True`), new users are automatically activated and can log in without requiring explicit activation by the superuser.
|
||||
|
||||
## Command-Line Interface
|
||||
|
||||
Langflow provides a command-line utility for managing superusers:
|
||||
|
||||
```bash
|
||||
langflow superuser
|
||||
```
|
||||
|
||||
This command prompts you to enter the username and password for the superuser, unless they are already set using environment variables.
|
||||
|
||||
## Sign-up
|
||||
|
||||
With _`LANGFLOW_AUTO_LOGIN`_ set to _`False`_, Langflow requires users to sign up before they can log in. The sign-up page is the default landing page when a user visits Langflow for the first time.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: useBaseUrl("img/sign-up.png"),
|
||||
}}
|
||||
style={{ width: "50%", maxWidth: "600px", margin: "0 auto" }}
|
||||
/>
|
||||
|
||||
## Profile settings
|
||||
|
||||
Users can change their profile settings by clicking on the profile icon in the top right corner of the application. This opens a dropdown menu with the following options:
|
||||
|
||||
- **Admin Page**: Opens the admin page, which is only accessible to the superuser.
|
||||
- **Profile Settings**: Opens the profile settings page.
|
||||
- **Sign Out**: Logs the user out.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: useBaseUrl("img/my-account.png"),
|
||||
}}
|
||||
style={{ width: "50%", maxWidth: "600px", margin: "0 auto" }}
|
||||
/>
|
||||
|
||||
By clicking on **Profile Settings**, the user is taken to the profile settings page, where they can change their password and their profile picture.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: useBaseUrl("img/profile-settings.png"),
|
||||
}}
|
||||
style={{ maxWidth: "600px", margin: "0 auto" }}
|
||||
/>
|
||||
|
||||
By clicking on **Admin Page**, the superuser is taken to the admin page, where they can manage users and groups.
|
||||
|
||||
<ZoomableImage
|
||||
alt="Docusaurus themed image"
|
||||
sources={{
|
||||
light: useBaseUrl("img/admin-page.png"),
|
||||
}}
|
||||
style={{ maxWidth: "600px", margin: "0 auto" }}
|
||||
|
||||
/>
|
||||
44
docs/docs/guides/async-tasks.mdx
Normal file
44
docs/docs/guides/async-tasks.mdx
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
import Admonition from "@theme/Admonition";
|
||||
|
||||
# Async API
|
||||
|
||||
## Introduction
|
||||
|
||||
<Admonition type="info" caption="In development">
|
||||
This implementation is still in development. Contributions are welcome!
|
||||
</Admonition>
|
||||
|
||||
The Async API is an implementation of the Langflow API that uses [Celery](https://docs.celeryproject.org/en/stable/)
|
||||
to run the tasks asynchronously, using a message broker to send and receive messages, a result backend to store the results and a cache to store the task states and session data.
|
||||
|
||||
### Configuration
|
||||
|
||||
The folder _`./deploy`_ in the [Github repository](https://github.com/logspace-ai/langflow) contains a _`.env.example`_ file that can be used to configure a Langflow deployment.
|
||||
The file contains the variables required to configure a Celery worker queue, Redis cache and result backend and a RabbitMQ message broker.
|
||||
|
||||
To set it up locally you can copy the file to _`.env`_ and run the following command:
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
This will set up the following containers:
|
||||
|
||||
- Langflow API
|
||||
- Celery worker
|
||||
- RabbitMQ message broker
|
||||
- Redis cache
|
||||
- PostgreSQL database
|
||||
- PGAdmin
|
||||
- Flower
|
||||
- Traefik
|
||||
- Grafana
|
||||
- Prometheus
|
||||
|
||||
### Testing
|
||||
|
||||
To run the tests for the Async API, you can run the following command:
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.with_tests.yml up --exit-code-from tests tests result_backend broker celeryworker db --build
|
||||
```
|
||||
49
docs/docs/guides/langfuse_integration.mdx
Normal file
49
docs/docs/guides/langfuse_integration.mdx
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
# Integrating Langfuse with Langflow
|
||||
|
||||
## Introduction
|
||||
|
||||
Langfuse is an open-source tracing and analytics tool designed for LLM applications. Integrating Langfuse with Langflow provides detailed production traces and granular insights into quality, cost, and latency. This integration allows you to monitor and debug your Langflow's chat or APIs easily.
|
||||
|
||||
## Step-by-Step Instructions
|
||||
|
||||
### Step 1: Create a Langfuse account
|
||||
|
||||
1. Go to [Langfuse](https://langfuse.com) and click on the "Sign In" button in the top right corner.
|
||||
2. Click on the "Sign Up" button and create an account.
|
||||
3. Once logged in, click on "Settings" and then on "Create new API keys."
|
||||
4. Copy the Public key and the Secret Key and save them somewhere safe.
|
||||
{/* Add these keys to your environment variables in the following step. */}
|
||||
|
||||
### Step 2: Set up Langfuse in Langflow
|
||||
|
||||
1. **Export the Environment Variables**: You'll need to export the environment variables `LANGFLOW_LANGFUSE_SECRET_KEY` and `LANGFLOW_LANGFUSE_PUBLIC_KEY` with the values obtained in Step 1.
|
||||
|
||||
You can do this by executing the following commands in your terminal:
|
||||
|
||||
```bash
|
||||
export LANGFLOW_LANGFUSE_SECRET_KEY=<your secret key>
|
||||
export LANGFLOW_LANGFUSE_PUBLIC_KEY=<your public key>
|
||||
```
|
||||
|
||||
Alternatively, you can run the Langflow CLI command:
|
||||
|
||||
```bash
|
||||
LANGFLOW_LANGFUSE_SECRET_KEY=<your secret key> LANGFLOW_LANGFUSE_PUBLIC_KEY=<your public key> langflow
|
||||
```
|
||||
|
||||
If you are self-hosting Langfuse, you can also set the environment variable `LANGFLOW_LANGFUSE_HOST` to point to your Langfuse instance. By default, Langfuse points to the cloud instance at `https://cloud.langfuse.com`.
|
||||
|
||||
2. **Verify Integration**: Ensure that the environment variables are set correctly by checking their existence in your environment, for example by running:
|
||||
|
||||
```bash
|
||||
echo $LANGFLOW_LANGFUSE_SECRET_KEY
|
||||
echo $LANGFLOW_LANGFUSE_PUBLIC_KEY
|
||||
```
|
||||
|
||||
3. **Monitor Langflow**: Now, whenever you use Langflow's chat or API, you will be able to see the tracing of your conversations in Langfuse.
|
||||
|
||||
That's it! You have successfully integrated Langfuse with Langflow, enhancing observability and debugging capabilities for your LLM application.
|
||||
|
||||
---
|
||||
|
||||
Note: For more details or customized configurations, please refer to the official [Langfuse documentation](https://langfuse.com/docs/integrations/langchain).
|
||||
7
docs/docs/guides/superuser.mdx
Normal file
7
docs/docs/guides/superuser.mdx
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
import ThemedImage from "@theme/ThemedImage";
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
import ZoomableImage from "/src/theme/ZoomableImage.js";
|
||||
import ReactPlayer from "react-player";
|
||||
|
||||
The superuser has elevated permissions: once logged in, they can activate new users and
edit their accounts.
|
||||
8
docs/package-lock.json
generated
8
docs/package-lock.json
generated
|
|
@ -28,7 +28,7 @@
|
|||
"medium-zoom": "^1.0.8",
|
||||
"node-fetch": "^3.3.1",
|
||||
"path-browserify": "^1.0.1",
|
||||
"postcss": "^8.4.24",
|
||||
"postcss": "^8.4.31",
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
|
|
@ -13956,9 +13956,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/postcss": {
|
||||
"version": "8.4.25",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.25.tgz",
|
||||
"integrity": "sha512-7taJ/8t2av0Z+sQEvNzCkpDynl0tX3uJMCODi6nT3PfASC7dYCWV9aQ+uiCf+KBD4SEFcu+GvJdGdwzQ6OSjCw==",
|
||||
"version": "8.4.31",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz",
|
||||
"integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@
|
|||
"medium-zoom": "^1.0.8",
|
||||
"node-fetch": "^3.3.1",
|
||||
"path-browserify": "^1.0.1",
|
||||
"postcss": "^8.4.24",
|
||||
"postcss": "^8.4.31",
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
|
|
|
|||
|
|
@ -16,6 +16,9 @@ module.exports = {
|
|||
label: "Guidelines",
|
||||
collapsed: false,
|
||||
items: [
|
||||
"guidelines/login",
|
||||
"guidelines/api",
|
||||
"guidelines/async-api",
|
||||
"guidelines/components",
|
||||
"guidelines/features",
|
||||
"guidelines/collection",
|
||||
|
|
@ -51,7 +54,12 @@ module.exports = {
|
|||
type: "category",
|
||||
label: "Step-by-Step Guides",
|
||||
collapsed: false,
|
||||
items: ["guides/loading_document", "guides/chatprompttemplate_guide"],
|
||||
items: [
|
||||
"guides/async-tasks",
|
||||
"guides/loading_document",
|
||||
"guides/chatprompttemplate_guide",
|
||||
"guides/langfuse_integration",
|
||||
],
|
||||
},
|
||||
// {
|
||||
// type: 'category',
|
||||
|
|
@ -83,7 +91,7 @@ module.exports = {
|
|||
type: "category",
|
||||
label: "Deployment",
|
||||
collapsed: false,
|
||||
items: ["deployment/gcp-deployment", "deployment/jina-deployment"],
|
||||
items: ["deployment/gcp-deployment"],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
|
|
|
|||
BIN
docs/static/img/admin-page.png
vendored
Normal file
BIN
docs/static/img/admin-page.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 171 KiB |
BIN
docs/static/img/api-key.png
vendored
Normal file
BIN
docs/static/img/api-key.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 2.9 KiB |
BIN
docs/static/img/my-account.png
vendored
Normal file
BIN
docs/static/img/my-account.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 32 KiB |
BIN
docs/static/img/profile-settings.png
vendored
Normal file
BIN
docs/static/img/profile-settings.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 341 KiB |
BIN
docs/static/img/sign-up.png
vendored
Normal file
BIN
docs/static/img/sign-up.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 67 KiB |
1603
poetry.lock
generated
1603
poetry.lock
generated
File diff suppressed because it is too large
Load diff
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "langflow"
|
||||
version = "0.5.0a0"
|
||||
version = "0.5.1"
|
||||
description = "A Python package with a built-in web application"
|
||||
authors = ["Logspace <contact@logspace.ai>"]
|
||||
maintainers = [
|
||||
|
|
@ -26,39 +26,38 @@ langflow = "langflow.__main__:main"
|
|||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.9,<3.11"
|
||||
fastapi = "^0.100.0"
|
||||
uvicorn = "^0.22.0"
|
||||
fastapi = "^0.103.0"
|
||||
uvicorn = "^0.23.0"
|
||||
beautifulsoup4 = "^4.12.2"
|
||||
google-search-results = "^2.4.1"
|
||||
google-api-python-client = "^2.79.0"
|
||||
typer = "^0.9.0"
|
||||
gunicorn = "^21.1.0"
|
||||
langchain = "^0.0.274"
|
||||
gunicorn = "^21.2.0"
|
||||
langchain = "^0.0.308"
|
||||
openai = "^0.27.8"
|
||||
pandas = "2.0.3"
|
||||
chromadb = "^0.3.21"
|
||||
huggingface-hub = { version = "^0.16.0", extras = ["inference"] }
|
||||
rich = "^13.4.2"
|
||||
rich = "^13.5.0"
|
||||
llama-cpp-python = { version = "~0.1.0", optional = true }
|
||||
networkx = "^3.1"
|
||||
unstructured = "^0.7.0"
|
||||
pypdf = "^3.11.0"
|
||||
unstructured = "^0.10.0"
|
||||
pypdf = "^3.15.0"
|
||||
lxml = "^4.9.2"
|
||||
pysrt = "^1.1.2"
|
||||
fake-useragent = "^1.2.1"
|
||||
docstring-parser = "^0.15"
|
||||
psycopg2-binary = "^2.9.6"
|
||||
pyarrow = "^12.0.0"
|
||||
tiktoken = "~0.4.0"
|
||||
tiktoken = "~0.5.0"
|
||||
wikipedia = "^1.4.0"
|
||||
langchain-serve = { version = ">0.0.51", optional = true }
|
||||
qdrant-client = "^1.3.0"
|
||||
qdrant-client = "^1.4.0"
|
||||
websockets = "^10.3"
|
||||
weaviate-client = "^3.21.0"
|
||||
weaviate-client = "^3.23.0"
|
||||
jina = "3.15.2"
|
||||
sentence-transformers = { version = "^2.2.2", optional = true }
|
||||
ctransformers = { version = "^0.2.10", optional = true }
|
||||
cohere = "^4.11.0"
|
||||
cohere = "^4.27.0"
|
||||
python-multipart = "^0.0.6"
|
||||
sqlmodel = "^0.0.8"
|
||||
faiss-cpu = "^1.7.4"
|
||||
|
|
@ -77,16 +76,24 @@ psycopg = "^3.1.9"
|
|||
psycopg-binary = "^3.1.9"
|
||||
fastavro = "^1.8.0"
|
||||
langchain-experimental = "^0.0.8"
|
||||
alembic = "^1.11.2"
|
||||
celery = { extras = ["redis"], version = "^5.3.1", optional = true }
|
||||
redis = { version = "^4.6.0", optional = true }
|
||||
flower = { version = "^2.0.0", optional = true }
|
||||
alembic = "^1.12.0"
|
||||
passlib = "^1.7.4"
|
||||
bcrypt = "^4.0.1"
|
||||
python-jose = "^3.3.0"
|
||||
metaphor-python = "^0.1.11"
|
||||
markupsafe = "^2.1.3"
|
||||
pywin32 = { version = "^306", markers = "sys_platform == 'win32'" }
|
||||
loguru = "^0.7.1"
|
||||
langfuse = "^1.0.13"
|
||||
pillow = "^10.0.0"
|
||||
metal-sdk = "^2.2.0"
|
||||
markupsafe = "^2.1.3"
|
||||
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
types-redis = "^4.6.0.5"
|
||||
black = "^23.1.0"
|
||||
ipykernel = "^6.21.2"
|
||||
mypy = "^1.1.1"
|
||||
|
|
@ -102,13 +109,16 @@ types-appdirs = "^1.4.3.5"
|
|||
types-pyyaml = "^6.0.12.8"
|
||||
types-python-jose = "^3.3.4.8"
|
||||
types-passlib = "^1.7.7.13"
|
||||
locust = "^2.16.1"
|
||||
pytest-mock = "^3.11.1"
|
||||
pytest-xdist = "^3.3.1"
|
||||
types-pywin32 = "^306.0.0.4"
|
||||
types-google-cloud-ndb = "^2.2.0.0"
|
||||
pytest-sugar = "^0.9.7"
|
||||
|
||||
|
||||
[tool.poetry.extras]
|
||||
deploy = ["langchain-serve"]
|
||||
deploy = ["langchain-serve", "celery", "redis", "flower"]
|
||||
local = ["llama-cpp-python", "sentence-transformers", "ctransformers"]
|
||||
all = ["deploy", "local"]
|
||||
|
||||
|
|
@ -120,6 +130,7 @@ testpaths = ["tests", "integration"]
|
|||
console_output_style = "progress"
|
||||
filterwarnings = ["ignore::DeprecationWarning"]
|
||||
log_cli = true
|
||||
markers = ["async_test"]
|
||||
|
||||
|
||||
[tool.ruff]
|
||||
|
|
|
|||
|
|
@ -3,9 +3,14 @@ services:
|
|||
- type: web
|
||||
name: langflow
|
||||
runtime: docker
|
||||
plan: free
|
||||
dockerfilePath: ./Dockerfile
|
||||
repo: https://github.com/logspace-ai/langflow
|
||||
branch: main
|
||||
healthCheckPath: /health
|
||||
autoDeploy: false
|
||||
envVars:
|
||||
- key: LANGFLOW_DATABASE_URL
|
||||
value: sqlite:////home/user/.cache/langflow/langflow.db
|
||||
disk:
|
||||
name: langflow-data
|
||||
mountPath: /home/user/.cache/langflow
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from importlib import metadata
|
||||
|
||||
# Deactivate cache manager for now
|
||||
# from langflow.services.cache import cache_manager
|
||||
# from langflow.services.cache import cache_service
|
||||
from langflow.processing.process import load_flow_from_json
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
|
|
@ -12,4 +12,4 @@ except metadata.PackageNotFoundError:
|
|||
__version__ = ""
|
||||
del metadata # optional, avoids polluting the results of dir(__package__)
|
||||
|
||||
__all__ = ["load_flow_from_json", "cache_manager", "CustomComponent"]
|
||||
__all__ = ["load_flow_from_json", "cache_service", "CustomComponent"]
|
||||
|
|
|
|||
|
|
@ -1,30 +1,29 @@
|
|||
import platform
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
import httpx
|
||||
from langflow.services.database.utils import session_getter
|
||||
from langflow.services.manager import initialize_services, initialize_settings_manager
|
||||
from langflow.services.utils import get_db_manager, get_settings_manager
|
||||
|
||||
from multiprocess import Process, cpu_count # type: ignore
|
||||
import platform
|
||||
import webbrowser
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
import socket
|
||||
from rich.panel import Panel
|
||||
|
||||
import httpx
|
||||
import typer
|
||||
from dotenv import load_dotenv
|
||||
from langflow.main import setup_app
|
||||
from langflow.services.database.utils import session_getter
|
||||
from langflow.services.getters import get_db_service, get_settings_service
|
||||
from langflow.services.utils import initialize_services, initialize_settings_service
|
||||
from langflow.utils.logger import configure, logger
|
||||
from multiprocess import Process, cpu_count # type: ignore
|
||||
from rich import box
|
||||
from rich import print as rprint
|
||||
from rich.table import Table
|
||||
import typer
|
||||
from langflow.main import setup_app
|
||||
from langflow.utils.logger import configure, logger
|
||||
import webbrowser
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from rich.console import Console
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
|
||||
console = Console()
|
||||
|
||||
app = typer.Typer()
|
||||
app = typer.Typer(no_args_is_help=True)
|
||||
|
||||
|
||||
def get_number_of_workers(workers=None):
|
||||
|
|
@ -53,9 +52,21 @@ def display_results(results):
|
|||
console.print() # Print a new line
|
||||
|
||||
|
||||
def set_var_for_macos_issue():
|
||||
# OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
|
||||
    # we need to set this var if we are running on macOS
|
||||
# otherwise we get an error when running gunicorn
|
||||
|
||||
if platform.system() in ["Darwin"]:
|
||||
import os
|
||||
|
||||
os.environ["OBJC_DISABLE_INITIALIZE_FORK_SAFETY"] = "YES"
|
||||
logger.debug("Set OBJC_DISABLE_INITIALIZE_FORK_SAFETY to YES to avoid error")
|
||||
|
||||
|
||||
def update_settings(
|
||||
config: str,
|
||||
cache: str,
|
||||
cache: Optional[str] = None,
|
||||
dev: bool = False,
|
||||
remove_api_keys: bool = False,
|
||||
components_path: Optional[Path] = None,
|
||||
|
|
@ -63,66 +74,20 @@ def update_settings(
|
|||
"""Update the settings from a config file."""
|
||||
|
||||
# Check for database_url in the environment variables
|
||||
initialize_settings_manager()
|
||||
settings_manager = get_settings_manager()
|
||||
initialize_settings_service()
|
||||
settings_service = get_settings_service()
|
||||
if config:
|
||||
logger.debug(f"Loading settings from {config}")
|
||||
settings_manager.settings.update_from_yaml(config, dev=dev)
|
||||
settings_service.settings.update_from_yaml(config, dev=dev)
|
||||
if remove_api_keys:
|
||||
logger.debug(f"Setting remove_api_keys to {remove_api_keys}")
|
||||
settings_manager.settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
|
||||
settings_service.settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
|
||||
if cache:
|
||||
logger.debug(f"Setting cache to {cache}")
|
||||
settings_manager.settings.update_settings(CACHE=cache)
|
||||
settings_service.settings.update_settings(CACHE=cache)
|
||||
if components_path:
|
||||
logger.debug(f"Adding component path {components_path}")
|
||||
settings_manager.settings.update_settings(COMPONENTS_PATH=components_path)
|
||||
|
||||
|
||||
def serve_on_jcloud():
|
||||
"""
|
||||
Deploy Langflow server on Jina AI Cloud
|
||||
"""
|
||||
import asyncio
|
||||
from importlib.metadata import version as mod_version
|
||||
|
||||
import click
|
||||
|
||||
try:
|
||||
from lcserve.__main__ import serve_on_jcloud # type: ignore
|
||||
except ImportError:
|
||||
click.secho(
|
||||
"🚨 Please install langchain-serve to deploy Langflow server on Jina AI Cloud "
|
||||
"using `pip install langchain-serve`",
|
||||
fg="red",
|
||||
)
|
||||
return
|
||||
|
||||
app_name = "langflow.lcserve:app"
|
||||
app_dir = str(Path(__file__).parent)
|
||||
version = mod_version("langflow")
|
||||
base_image = "jinaai+docker://deepankarm/langflow"
|
||||
|
||||
click.echo("🚀 Deploying Langflow server on Jina AI Cloud")
|
||||
app_id = asyncio.run(
|
||||
serve_on_jcloud(
|
||||
fastapi_app_str=app_name,
|
||||
app_dir=app_dir,
|
||||
uses=f"{base_image}:{version}",
|
||||
name="langflow",
|
||||
)
|
||||
)
|
||||
click.secho(
|
||||
"🎉 Langflow server successfully deployed on Jina AI Cloud 🎉", fg="green"
|
||||
)
|
||||
click.secho(
|
||||
"🔗 Click on the link to open the server (please allow ~1-2 minutes for the server to startup): ",
|
||||
nl=False,
|
||||
fg="green",
|
||||
)
|
||||
click.secho(f"https://{app_id}.wolf.jina.ai/", fg="blue")
|
||||
click.secho("📖 Read more about managing the server: ", nl=False, fg="green")
|
||||
click.secho("https://github.com/jina-ai/langchain-serve", fg="blue")
|
||||
settings_service.settings.update_settings(COMPONENTS_PATH=components_path)
|
||||
|
||||
|
||||
@app.command()
|
||||
|
|
@ -131,7 +96,7 @@ def run(
|
|||
"127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
|
||||
),
|
||||
workers: int = typer.Option(
|
||||
2, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
|
||||
1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"
|
||||
),
|
||||
timeout: int = typer.Option(300, help="Worker timeout in seconds."),
|
||||
port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"),
|
||||
|
|
@ -153,12 +118,11 @@ def run(
|
|||
log_file: Path = typer.Option(
|
||||
"logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"
|
||||
),
|
||||
cache: str = typer.Option(
|
||||
cache: Optional[str] = typer.Option(
|
||||
envvar="LANGFLOW_LANGCHAIN_CACHE",
|
||||
help="Type of cache to use. (InMemoryCache, SQLiteCache)",
|
||||
default="SQLiteCache",
|
||||
default=None,
|
||||
),
|
||||
jcloud: bool = typer.Option(False, help="Deploy on Jina AI Cloud"),
|
||||
dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"),
|
||||
# This variable does not work but is set by the .env file
|
||||
# and works with Pydantic
|
||||
|
|
@ -189,15 +153,15 @@ def run(
|
|||
),
|
||||
):
|
||||
"""
|
||||
Run the Langflow server.
|
||||
Run the Langflow.
|
||||
"""
|
||||
|
||||
set_var_for_macos_issue()
|
||||
# override env variables with .env file
|
||||
|
||||
if env_file:
|
||||
load_dotenv(env_file, override=True)
|
||||
|
||||
if jcloud:
|
||||
return serve_on_jcloud()
|
||||
|
||||
configure(log_level=log_level, log_file=log_file)
|
||||
update_settings(
|
||||
config,
|
||||
|
|
@ -216,7 +180,6 @@ def run(
|
|||
options = {
|
||||
"bind": f"{host}:{port}",
|
||||
"workers": get_number_of_workers(workers),
|
||||
"worker_class": "uvicorn.workers.UvicornWorker",
|
||||
"timeout": timeout,
|
||||
}
|
||||
|
||||
|
|
@ -350,18 +313,25 @@ def superuser(
|
|||
password: str = typer.Option(
|
||||
..., prompt=True, hide_input=True, help="Password for the superuser."
|
||||
),
|
||||
log_level: str = typer.Option(
|
||||
"critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
|
||||
),
|
||||
):
|
||||
"""
|
||||
Create a superuser.
|
||||
"""
|
||||
configure(log_level=log_level)
|
||||
initialize_services()
|
||||
db_manager = get_db_manager()
|
||||
with session_getter(db_manager) as session:
|
||||
db_service = get_db_service()
|
||||
with session_getter(db_service) as session:
|
||||
from langflow.services.auth.utils import create_super_user
|
||||
|
||||
if create_super_user(db=session, username=username, password=password):
|
||||
# Verify that the superuser was created
|
||||
from langflow.services.database.models.user.user import User
|
||||
|
||||
user = session.query(User).filter(User.username == username).first()
|
||||
if user is None:
|
||||
user: User = session.query(User).filter(User.username == username).first()
|
||||
if user is None or not user.is_superuser:
|
||||
typer.echo("Superuser creation failed.")
|
||||
return
|
||||
|
||||
|
|
@ -372,12 +342,15 @@ def superuser(
|
|||
|
||||
|
||||
@app.command()
|
||||
def migration(test: bool = typer.Option(False, help="Run migrations in test mode.")):
|
||||
def migration(test: bool = typer.Option(True, help="Run migrations in test mode.")):
|
||||
"""
|
||||
Run or test migrations.
|
||||
"""
|
||||
initialize_services()
|
||||
db_manager = get_db_manager()
|
||||
db_service = get_db_service()
|
||||
if not test:
|
||||
db_manager.run_migrations()
|
||||
results = db_manager.run_migrations_test()
|
||||
db_service.run_migrations()
|
||||
results = db_service.run_migrations_test()
|
||||
display_results(results)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,49 @@
|
|||
"""Add profile-image column
|
||||
|
||||
Revision ID: 67cc006d50bf
|
||||
Revises: 260dbcc8b680
|
||||
Create Date: 2023-09-08 07:36:13.387318
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "67cc006d50bf"
|
||||
down_revision: Union[str, None] = "260dbcc8b680"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn)
|
||||
if "user" in inspector.get_table_names() and "profile_image" not in [
|
||||
column["name"] for column in inspector.get_columns("user")
|
||||
]:
|
||||
with op.batch_alter_table("user", schema=None) as batch_op:
|
||||
batch_op.add_column(
|
||||
sa.Column(
|
||||
"profile_image", sqlmodel.sql.sqltypes.AutoString(), nullable=True
|
||||
)
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
inspector = Inspector.from_engine(conn)
|
||||
if "user" in inspector.get_table_names() and "profile_image" in [
|
||||
column["name"] for column in inspector.get_columns("user")
|
||||
]:
|
||||
with op.batch_alter_table("user", schema=None) as batch_op:
|
||||
batch_op.drop_column("profile_image")
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
"""Change columns to be nullable
|
||||
|
||||
Revision ID: eb5866d51fd2
|
||||
Revises: 67cc006d50bf
|
||||
Create Date: 2023-10-04 10:18:25.640458
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel # noqa: F401
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "eb5866d51fd2"
|
||||
down_revision: Union[str, None] = "67cc006d50bf"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
try:
|
||||
op.drop_table("flowstyle")
|
||||
with op.batch_alter_table("component", schema=None) as batch_op:
|
||||
batch_op.drop_index("ix_component_frontend_node_id")
|
||||
batch_op.drop_index("ix_component_name")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
op.drop_table("component")
|
||||
except Exception:
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
try:
|
||||
op.create_table(
|
||||
"component",
|
||||
sa.Column("id", sa.CHAR(length=32), nullable=False),
|
||||
sa.Column("frontend_node_id", sa.CHAR(length=32), nullable=False),
|
||||
sa.Column("name", sa.VARCHAR(), nullable=False),
|
||||
sa.Column("description", sa.VARCHAR(), nullable=True),
|
||||
sa.Column("python_code", sa.VARCHAR(), nullable=True),
|
||||
sa.Column("return_type", sa.VARCHAR(), nullable=True),
|
||||
sa.Column("is_disabled", sa.BOOLEAN(), nullable=False),
|
||||
sa.Column("is_read_only", sa.BOOLEAN(), nullable=False),
|
||||
sa.Column("create_at", sa.DATETIME(), nullable=False),
|
||||
sa.Column("update_at", sa.DATETIME(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
with op.batch_alter_table("component", schema=None) as batch_op:
|
||||
batch_op.create_index("ix_component_name", ["name"], unique=False)
|
||||
batch_op.create_index(
|
||||
"ix_component_frontend_node_id", ["frontend_node_id"], unique=False
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
op.create_table(
|
||||
"flowstyle",
|
||||
sa.Column("color", sa.VARCHAR(), nullable=False),
|
||||
sa.Column("emoji", sa.VARCHAR(), nullable=False),
|
||||
sa.Column("flow_id", sa.CHAR(length=32), nullable=True),
|
||||
sa.Column("id", sa.CHAR(length=32), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
["flow_id"],
|
||||
["flow.id"],
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
# ### end Alembic commands ###
|
||||
|
|
@ -59,33 +59,6 @@ def build_input_keys_response(langchain_object, artifacts):
|
|||
return input_keys_response
|
||||
|
||||
|
||||
def merge_nested_dicts(dict1, dict2):
|
||||
for key, value in dict2.items():
|
||||
if isinstance(value, dict) and isinstance(dict1.get(key), dict):
|
||||
dict1[key] = merge_nested_dicts(dict1[key], value)
|
||||
else:
|
||||
dict1[key] = value
|
||||
return dict1
|
||||
|
||||
|
||||
def merge_nested_dicts_with_renaming(dict1, dict2):
|
||||
for key, value in dict2.items():
|
||||
if (
|
||||
key in dict1
|
||||
and isinstance(value, dict)
|
||||
and isinstance(dict1.get(key), dict)
|
||||
):
|
||||
for sub_key, sub_value in value.items():
|
||||
if sub_key in dict1[key]:
|
||||
new_key = get_new_key(dict1[key], sub_key)
|
||||
dict1[key][new_key] = sub_value
|
||||
else:
|
||||
dict1[key][sub_key] = sub_value
|
||||
else:
|
||||
dict1[key] = value
|
||||
return dict1
|
||||
|
||||
|
||||
def get_new_key(dictionary, original_key):
|
||||
counter = 1
|
||||
new_key = original_key + " (" + str(counter) + ")"
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ from langflow.services.database.models.api_key.crud import (
|
|||
delete_api_key,
|
||||
)
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.getters import get_session
|
||||
from sqlmodel import Session
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,15 +1,17 @@
|
|||
import asyncio
|
||||
from uuid import UUID
|
||||
|
||||
from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
|
||||
|
||||
from langflow.api.v1.schemas import ChatResponse
|
||||
from langflow.api.v1.schemas import ChatResponse, PromptResponse
|
||||
|
||||
|
||||
from typing import Any, Dict, List, Union
|
||||
from fastapi import WebSocket
|
||||
from typing import Any, Dict, List, Optional
|
||||
from langflow.services.getters import get_chat_service
|
||||
|
||||
|
||||
from langchain.schema import AgentAction, LLMResult, AgentFinish
|
||||
from langflow.utils.util import remove_ansi_escape_codes
|
||||
from langchain.schema import AgentAction, AgentFinish
|
||||
from loguru import logger
|
||||
|
||||
|
||||
|
|
@ -17,39 +19,15 @@ from loguru import logger
|
|||
class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
|
||||
"""Callback handler for streaming LLM responses."""
|
||||
|
||||
def __init__(self, websocket: WebSocket):
|
||||
self.websocket = websocket
|
||||
def __init__(self, client_id: str):
|
||||
self.chat_service = get_chat_service()
|
||||
self.client_id = client_id
|
||||
self.websocket = self.chat_service.active_connections[self.client_id]
|
||||
|
||||
async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
|
||||
resp = ChatResponse(message=token, type="stream", intermediate_steps="")
|
||||
await self.websocket.send_json(resp.dict())
|
||||
|
||||
async def on_llm_start(
|
||||
self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
|
||||
) -> Any:
|
||||
"""Run when LLM starts running."""
|
||||
|
||||
async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> Any:
|
||||
"""Run when LLM ends running."""
|
||||
|
||||
async def on_llm_error(
|
||||
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
|
||||
) -> Any:
|
||||
"""Run when LLM errors."""
|
||||
|
||||
async def on_chain_start(
|
||||
self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
|
||||
) -> Any:
|
||||
"""Run when chain starts running."""
|
||||
|
||||
async def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> Any:
|
||||
"""Run when chain ends running."""
|
||||
|
||||
async def on_chain_error(
|
||||
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
|
||||
) -> Any:
|
||||
"""Run when chain errors."""
|
||||
|
||||
async def on_tool_start(
|
||||
self, serialized: Dict[str, Any], input_str: str, **kwargs: Any
|
||||
) -> Any:
|
||||
|
|
@ -95,8 +73,14 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
|
|||
logger.error(f"Error sending response: {exc}")
|
||||
|
||||
async def on_tool_error(
|
||||
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
|
||||
) -> Any:
|
||||
self,
|
||||
error: BaseException,
|
||||
*,
|
||||
run_id: UUID,
|
||||
parent_run_id: Optional[UUID] = None,
|
||||
tags: Optional[List[str]] = None,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Run when tool errors."""
|
||||
|
||||
async def on_text(self, text: str, **kwargs: Any) -> Any:
|
||||
|
|
@ -104,6 +88,14 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
|
|||
# This runs when first sending the prompt
|
||||
# to the LLM, adding it will send the final prompt
|
||||
# to the frontend
|
||||
if "Prompt after formatting" in text:
|
||||
text = text.replace("Prompt after formatting:\n", "")
|
||||
text = remove_ansi_escape_codes(text)
|
||||
resp = PromptResponse(
|
||||
prompt=text,
|
||||
)
|
||||
await self.websocket.send_json(resp.dict())
|
||||
self.chat_service.chat_history.add_message(self.client_id, resp)
|
||||
|
||||
async def on_agent_action(self, action: AgentAction, **kwargs: Any):
|
||||
log = f"Thought: {action.log}"
|
||||
|
|
@ -131,8 +123,10 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
|
|||
class StreamingLLMCallbackHandler(BaseCallbackHandler):
|
||||
"""Callback handler for streaming LLM responses."""
|
||||
|
||||
def __init__(self, websocket):
|
||||
self.websocket = websocket
|
||||
def __init__(self, client_id: str):
|
||||
self.chat_service = get_chat_service()
|
||||
self.client_id = client_id
|
||||
self.websocket = self.chat_service.active_connections[self.client_id]
|
||||
|
||||
def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
|
||||
resp = ChatResponse(message=token, type="stream", intermediate_steps="")
|
||||
|
|
|
|||
|
|
@ -13,17 +13,16 @@ from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, St
|
|||
|
||||
from langflow.graph.graph.base import Graph
|
||||
from langflow.services.auth.utils import get_current_active_user, get_current_user
|
||||
from langflow.services.cache.utils import update_build_status
|
||||
from loguru import logger
|
||||
from langflow.services.utils import get_chat_manager, get_session
|
||||
from cachetools import LRUCache
|
||||
from langflow.services.getters import get_chat_service, get_session, get_cache_service
|
||||
from sqlmodel import Session
|
||||
from langflow.services.chat.manager import ChatManager
|
||||
from langflow.services.chat.manager import ChatService
|
||||
from langflow.services.cache.manager import BaseCacheService
|
||||
|
||||
|
||||
router = APIRouter(tags=["Chat"])
|
||||
|
||||
flow_data_store: LRUCache = LRUCache(maxsize=10)
|
||||
|
||||
|
||||
@router.websocket("/chat/{client_id}")
|
||||
async def chat(
|
||||
|
|
@ -31,7 +30,7 @@ async def chat(
|
|||
websocket: WebSocket,
|
||||
token: str = Query(...),
|
||||
db: Session = Depends(get_session),
|
||||
chat_manager: "ChatManager" = Depends(get_chat_manager),
|
||||
chat_service: "ChatService" = Depends(get_chat_service),
|
||||
):
|
||||
"""Websocket endpoint for chat."""
|
||||
try:
|
||||
|
|
@ -46,15 +45,15 @@ async def chat(
|
|||
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
|
||||
)
|
||||
|
||||
if client_id in chat_manager.in_memory_cache:
|
||||
await chat_manager.handle_websocket(client_id, websocket)
|
||||
if client_id in chat_service.cache_service:
|
||||
await chat_service.handle_websocket(client_id, websocket)
|
||||
else:
|
||||
# We accept the connection but close it immediately
|
||||
# if the flow is not built yet
|
||||
message = "Please, build the flow before sending messages"
|
||||
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message)
|
||||
except WebSocketException as exc:
|
||||
logger.error(f"Websocket error: {exc}")
|
||||
logger.error(f"Websocket exrror: {exc}")
|
||||
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc))
|
||||
except Exception as exc:
|
||||
logger.error(f"Error in chat websocket: {exc}")
|
||||
|
|
@ -72,26 +71,26 @@ async def init_build(
|
|||
graph_data: dict,
|
||||
flow_id: str,
|
||||
current_user=Depends(get_current_active_user),
|
||||
chat_manager: "ChatManager" = Depends(get_chat_manager),
|
||||
chat_service: "ChatService" = Depends(get_chat_service),
|
||||
cache_service: "BaseCacheService" = Depends(get_cache_service),
|
||||
):
|
||||
"""Initialize the build by storing graph data and returning a unique session ID."""
|
||||
|
||||
try:
|
||||
if flow_id is None:
|
||||
raise ValueError("No ID provided")
|
||||
# Check if already building
|
||||
if (
|
||||
flow_id in flow_data_store
|
||||
and flow_data_store[flow_id]["status"] == BuildStatus.IN_PROGRESS
|
||||
flow_id in cache_service
|
||||
and isinstance(cache_service[flow_id], dict)
|
||||
and cache_service[flow_id].get("status") == BuildStatus.IN_PROGRESS
|
||||
):
|
||||
return InitResponse(flowId=flow_id)
|
||||
|
||||
# Delete from cache if already exists
|
||||
if flow_id in chat_manager.in_memory_cache:
|
||||
with chat_manager.in_memory_cache._lock:
|
||||
chat_manager.in_memory_cache.delete(flow_id)
|
||||
logger.debug(f"Deleted flow {flow_id} from cache")
|
||||
flow_data_store[flow_id] = {
|
||||
if flow_id in chat_service.cache_service:
|
||||
chat_service.cache_service.delete(flow_id)
|
||||
logger.debug(f"Deleted flow {flow_id} from cache")
|
||||
cache_service[flow_id] = {
|
||||
"graph_data": graph_data,
|
||||
"status": BuildStatus.STARTED,
|
||||
"user_id": current_user.id,
|
||||
|
|
@ -104,12 +103,14 @@ async def init_build(
|
|||
|
||||
|
||||
@router.get("/build/{flow_id}/status", response_model=BuiltResponse)
|
||||
async def build_status(flow_id: str):
|
||||
"""Check the flow_id is in the flow_data_store."""
|
||||
async def build_status(
|
||||
flow_id: str, cache_service: "BaseCacheService" = Depends(get_cache_service)
|
||||
):
|
||||
"""Check the flow_id is in the cache_service."""
|
||||
try:
|
||||
built = (
|
||||
flow_id in flow_data_store
|
||||
and flow_data_store[flow_id]["status"] == BuildStatus.SUCCESS
|
||||
flow_id in cache_service
|
||||
and cache_service[flow_id]["status"] == BuildStatus.SUCCESS
|
||||
)
|
||||
|
||||
return BuiltResponse(
|
||||
|
|
@ -123,7 +124,9 @@ async def build_status(flow_id: str):
|
|||
|
||||
@router.get("/build/stream/{flow_id}", response_class=StreamingResponse)
|
||||
async def stream_build(
|
||||
flow_id: str, chat_manager: "ChatManager" = Depends(get_chat_manager)
|
||||
flow_id: str,
|
||||
chat_service: "ChatService" = Depends(get_chat_service),
|
||||
cache_service: "BaseCacheService" = Depends(get_cache_service),
|
||||
):
|
||||
"""Stream the build process based on stored flow data."""
|
||||
|
||||
|
|
@ -131,18 +134,18 @@ async def stream_build(
|
|||
final_response = {"end_of_stream": True}
|
||||
artifacts = {}
|
||||
try:
|
||||
if flow_id not in flow_data_store:
|
||||
if flow_id not in cache_service:
|
||||
error_message = "Invalid session ID"
|
||||
yield str(StreamData(event="error", data={"error": error_message}))
|
||||
return
|
||||
|
||||
if flow_data_store[flow_id].get("status") == BuildStatus.IN_PROGRESS:
|
||||
if cache_service[flow_id].get("status") == BuildStatus.IN_PROGRESS:
|
||||
error_message = "Already building"
|
||||
yield str(StreamData(event="error", data={"error": error_message}))
|
||||
return
|
||||
|
||||
graph_data = flow_data_store[flow_id].get("graph_data")
|
||||
user_id = flow_data_store[flow_id]["user_id"]
|
||||
graph_data = cache_service[flow_id].get("graph_data")
|
||||
cache_service[flow_id]["user_id"]
|
||||
|
||||
if not graph_data:
|
||||
error_message = "No data provided"
|
||||
|
|
@ -155,7 +158,7 @@ async def stream_build(
|
|||
graph = Graph.from_payload(graph_data)
|
||||
|
||||
number_of_nodes = len(graph.nodes)
|
||||
flow_data_store[flow_id]["status"] = BuildStatus.IN_PROGRESS
|
||||
update_build_status(cache_service, flow_id, BuildStatus.IN_PROGRESS)
|
||||
|
||||
for i, vertex in enumerate(graph.generator_build(), 1):
|
||||
try:
|
||||
|
|
@ -163,8 +166,10 @@ async def stream_build(
|
|||
"log": f"Building node {vertex.vertex_type}",
|
||||
}
|
||||
yield str(StreamData(event="log", data=log_dict))
|
||||
vertex.build(user_id)
|
||||
|
||||
if vertex.is_task:
|
||||
vertex = try_running_celery_task(vertex)
|
||||
else:
|
||||
vertex.build()
|
||||
params = vertex._built_object_repr()
|
||||
valid = True
|
||||
logger.debug(f"Building node {str(vertex.vertex_type)}")
|
||||
|
|
@ -180,7 +185,7 @@ async def stream_build(
|
|||
logger.exception(exc)
|
||||
params = str(exc)
|
||||
valid = False
|
||||
flow_data_store[flow_id]["status"] = BuildStatus.FAILURE
|
||||
update_build_status(cache_service, flow_id, BuildStatus.FAILURE)
|
||||
|
||||
vertex_id = (
|
||||
vertex.parent_node_id if vertex.parent_is_top_level else vertex.id
|
||||
|
|
@ -208,14 +213,15 @@ async def stream_build(
|
|||
"handle_keys": [],
|
||||
}
|
||||
yield str(StreamData(event="message", data=input_keys_response))
|
||||
chat_manager.set_cache(flow_id, langchain_object)
|
||||
chat_service.set_cache(flow_id, langchain_object)
|
||||
# We need to reset the chat history
|
||||
chat_manager.chat_history.empty_history(flow_id)
|
||||
flow_data_store[flow_id]["status"] = BuildStatus.SUCCESS
|
||||
chat_service.chat_history.empty_history(flow_id)
|
||||
update_build_status(cache_service, flow_id, BuildStatus.SUCCESS)
|
||||
except Exception as exc:
|
||||
logger.exception(exc)
|
||||
logger.error("Error while building the flow: %s", exc)
|
||||
flow_data_store[flow_id]["status"] = BuildStatus.FAILURE
|
||||
|
||||
update_build_status(cache_service, flow_id, BuildStatus.FAILURE)
|
||||
yield str(StreamData(event="error", data={"error": str(exc)}))
|
||||
finally:
|
||||
yield str(StreamData(event="message", data=final_response))
|
||||
|
|
@ -225,3 +231,19 @@ async def stream_build(
|
|||
except Exception as exc:
|
||||
logger.error(f"Error streaming build: {exc}")
|
||||
raise HTTPException(status_code=500, detail=str(exc))
|
||||
|
||||
|
||||
def try_running_celery_task(vertex):
|
||||
# Try running the task in celery
|
||||
# and set the task_id to the local vertex
|
||||
# if it fails, run the task locally
|
||||
try:
|
||||
from langflow.worker import build_vertex
|
||||
|
||||
task = build_vertex.delay(vertex)
|
||||
vertex.task_id = task.id
|
||||
except Exception as exc:
|
||||
logger.debug(f"Error running task in celery: {exc}")
|
||||
vertex.task_id = None
|
||||
vertex.build()
|
||||
return vertex
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ from datetime import timezone
|
|||
from typing import List
|
||||
from uuid import UUID
|
||||
from langflow.services.database.models.component import Component, ComponentModel
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.getters import get_session
|
||||
from sqlmodel import Session, select
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
|
|
|||
|
|
@ -1,12 +1,17 @@
|
|||
from http import HTTPStatus
|
||||
from typing import Annotated, Any, Optional, Union
|
||||
from typing import Annotated, Optional, Union
|
||||
from langflow.services.auth.utils import api_key_security, get_current_active_user
|
||||
|
||||
|
||||
from langflow.services.cache.utils import save_uploaded_file
|
||||
from langflow.services.database.models.flow import Flow
|
||||
from langflow.processing.process import process_graph_cached, process_tweaks
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import (
|
||||
get_session_service,
|
||||
get_settings_service,
|
||||
get_task_service,
|
||||
)
|
||||
from loguru import logger
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body, status
|
||||
import sqlalchemy as sa
|
||||
|
|
@ -15,66 +20,43 @@ from langflow.interface.custom.custom_component import CustomComponent
|
|||
|
||||
from langflow.api.v1.schemas import (
|
||||
ProcessResponse,
|
||||
TaskResponse,
|
||||
TaskStatusResponse,
|
||||
UploadFileResponse,
|
||||
CustomComponentCode,
|
||||
)
|
||||
|
||||
from langflow.api.utils import merge_nested_dicts_with_renaming
|
||||
|
||||
from langflow.interface.types import (
|
||||
build_langchain_types_dict,
|
||||
build_langchain_template_custom_component,
|
||||
build_langchain_custom_component_list_from_path,
|
||||
)
|
||||
from langflow.services.getters import get_session
|
||||
|
||||
try:
|
||||
from langflow.worker import process_graph_cached_task
|
||||
except ImportError:
|
||||
|
||||
def process_graph_cached_task(*args, **kwargs):
|
||||
raise NotImplementedError("Celery is not installed")
|
||||
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from sqlmodel import Session
|
||||
|
||||
|
||||
from langflow.services.task.manager import TaskService
|
||||
|
||||
# build router
|
||||
router = APIRouter(tags=["Base"])
|
||||
|
||||
|
||||
@router.get("/all", dependencies=[Depends(get_current_active_user)])
|
||||
def get_all(
|
||||
settings_manager=Depends(get_settings_manager),
|
||||
settings_service=Depends(get_settings_service),
|
||||
):
|
||||
from langflow.interface.types import get_all_types_dict
|
||||
|
||||
logger.debug("Building langchain types dict")
|
||||
native_components = build_langchain_types_dict()
|
||||
# custom_components is a list of dicts
|
||||
# need to merge all the keys into one dict
|
||||
custom_components_from_file: dict[str, Any] = {}
|
||||
if settings_manager.settings.COMPONENTS_PATH:
|
||||
logger.info(
|
||||
f"Building custom components from {settings_manager.settings.COMPONENTS_PATH}"
|
||||
)
|
||||
|
||||
custom_component_dicts = []
|
||||
processed_paths = []
|
||||
for path in settings_manager.settings.COMPONENTS_PATH:
|
||||
if str(path) in processed_paths:
|
||||
continue
|
||||
custom_component_dict = build_langchain_custom_component_list_from_path(
|
||||
str(path)
|
||||
)
|
||||
custom_component_dicts.append(custom_component_dict)
|
||||
processed_paths.append(str(path))
|
||||
|
||||
logger.info(f"Loading {len(custom_component_dicts)} category(ies)")
|
||||
for custom_component_dict in custom_component_dicts:
|
||||
# custom_component_dict is a dict of dicts
|
||||
if not custom_component_dict:
|
||||
continue
|
||||
category = list(custom_component_dict.keys())[0]
|
||||
logger.info(
|
||||
f"Loading {len(custom_component_dict[category])} component(s) from category {category}"
|
||||
)
|
||||
custom_components_from_file = merge_nested_dicts_with_renaming(
|
||||
custom_components_from_file, custom_component_dict
|
||||
)
|
||||
|
||||
return merge_nested_dicts_with_renaming(
|
||||
native_components, custom_components_from_file
|
||||
)
|
||||
try:
|
||||
return get_all_types_dict(settings_service)
|
||||
except Exception as exc:
|
||||
raise HTTPException(status_code=500, detail=str(exc)) from exc
|
||||
|
||||
|
||||
# For backwards compatibility we will keep the old endpoint
|
||||
|
|
@ -94,7 +76,9 @@ async def process(
|
|||
tweaks: Optional[dict] = None,
|
||||
clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821
|
||||
session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821
|
||||
task_service: "TaskService" = Depends(get_task_service),
|
||||
api_key_user: User = Depends(api_key_security),
|
||||
sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821
|
||||
):
|
||||
"""
|
||||
Endpoint to process an input with a given flow_id.
|
||||
|
|
@ -125,10 +109,55 @@ async def process(
|
|||
graph_data = process_tweaks(graph_data, tweaks)
|
||||
except Exception as exc:
|
||||
logger.error(f"Error processing tweaks: {exc}")
|
||||
response, session_id = process_graph_cached(
|
||||
graph_data, inputs, clear_cache, session_id
|
||||
if sync:
|
||||
task_id, result = await task_service.launch_and_await_task(
|
||||
process_graph_cached_task
|
||||
if task_service.use_celery
|
||||
else process_graph_cached,
|
||||
graph_data,
|
||||
inputs,
|
||||
clear_cache,
|
||||
session_id,
|
||||
)
|
||||
if isinstance(result, dict) and "result" in result:
|
||||
task_result = result["result"]
|
||||
session_id = result["session_id"]
|
||||
elif hasattr(result, "result") and hasattr(result, "session_id"):
|
||||
task_result = result.result
|
||||
|
||||
session_id = result.session_id
|
||||
else:
|
||||
logger.warning(
|
||||
"This is an experimental feature and may not work as expected."
|
||||
"Please report any issues to our GitHub repository."
|
||||
)
|
||||
if session_id is None:
|
||||
# Generate a session ID
|
||||
session_id = get_session_service().generate_key(
|
||||
session_id=session_id, data_graph=graph_data
|
||||
)
|
||||
task_id, task = await task_service.launch_task(
|
||||
process_graph_cached_task
|
||||
if task_service.use_celery
|
||||
else process_graph_cached,
|
||||
graph_data,
|
||||
inputs,
|
||||
clear_cache,
|
||||
session_id,
|
||||
)
|
||||
task_result = task.status
|
||||
|
||||
if task_id:
|
||||
task_response = TaskResponse(id=task_id, href=f"api/v1/task/{task_id}")
|
||||
else:
|
||||
task_response = None
|
||||
|
||||
return ProcessResponse(
|
||||
result=task_result,
|
||||
task=task_response,
|
||||
session_id=session_id,
|
||||
backend=task_service.backend_name,
|
||||
)
|
||||
return ProcessResponse(result=response, session_id=session_id)
|
||||
except sa.exc.StatementError as exc:
|
||||
# StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
|
||||
if "badly formed hexadecimal UUID string" in str(exc):
|
||||
|
|
@ -151,6 +180,23 @@ async def process(
|
|||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
|
||||
|
||||
@router.get("/task/{task_id}", response_model=TaskStatusResponse)
|
||||
async def get_task_status(task_id: str):
|
||||
task_service = get_task_service()
|
||||
task = task_service.get_task(task_id)
|
||||
result = None
|
||||
if task.ready():
|
||||
result = task.result
|
||||
if isinstance(result, dict) and "result" in result:
|
||||
result = result["result"]
|
||||
elif hasattr(result, "result"):
|
||||
result = result.result
|
||||
|
||||
if task is None:
|
||||
raise HTTPException(status_code=404, detail="Task not found")
|
||||
return TaskStatusResponse(status=task.status, result=result)
|
||||
|
||||
|
||||
@router.post(
|
||||
"/upload/{flow_id}",
|
||||
response_model=UploadFileResponse,
|
||||
|
|
@ -159,7 +205,7 @@ async def process(
|
|||
async def create_upload_file(file: UploadFile, flow_id: str):
|
||||
# Cache file
|
||||
try:
|
||||
file_path = save_uploaded_file(file.file, folder_name=flow_id)
|
||||
file_path = save_uploaded_file(file, folder_name=flow_id)
|
||||
|
||||
return UploadFileResponse(
|
||||
flowId=flow_id,
|
||||
|
|
@ -182,6 +228,10 @@ def get_version():
|
|||
async def custom_component(
|
||||
raw_code: CustomComponentCode,
|
||||
):
|
||||
from langflow.interface.types import (
|
||||
build_langchain_template_custom_component,
|
||||
)
|
||||
|
||||
extractor = CustomComponent(code=raw_code.code)
|
||||
extractor.is_check_valid()
|
||||
|
||||
|
|
|
|||
|
|
@ -12,8 +12,8 @@ from langflow.services.database.models.flow import (
|
|||
FlowUpdate,
|
||||
)
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_session
|
||||
from langflow.services.getters import get_settings_service
|
||||
import orjson
|
||||
from sqlmodel import Session
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
|
@ -83,7 +83,7 @@ def update_flow(
|
|||
flow_id: UUID,
|
||||
flow: FlowUpdate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
settings_manager=Depends(get_settings_manager),
|
||||
settings_service=Depends(get_settings_service),
|
||||
):
|
||||
"""Update a flow."""
|
||||
|
||||
|
|
@ -91,7 +91,7 @@ def update_flow(
|
|||
if not db_flow:
|
||||
raise HTTPException(status_code=404, detail="Flow not found")
|
||||
flow_data = flow.dict(exclude_unset=True)
|
||||
if settings_manager.settings.REMOVE_API_KEYS:
|
||||
if settings_service.settings.REMOVE_API_KEYS:
|
||||
flow_data = remove_api_keys(flow_data)
|
||||
for key, value in flow_data.items():
|
||||
if value is not None:
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ from sqlmodel import Session
|
|||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordRequestForm
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.getters import get_session
|
||||
from langflow.api.v1.schemas import Token
|
||||
from langflow.services.auth.utils import (
|
||||
authenticate_user,
|
||||
|
|
@ -12,7 +12,7 @@ from langflow.services.auth.utils import (
|
|||
get_current_active_user,
|
||||
)
|
||||
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
router = APIRouter(tags=["Login"])
|
||||
|
||||
|
|
@ -23,7 +23,17 @@ async def login_to_get_access_token(
|
|||
db: Session = Depends(get_session),
|
||||
# _: Session = Depends(get_current_active_user)
|
||||
):
|
||||
if user := authenticate_user(form_data.username, form_data.password, db):
|
||||
try:
|
||||
user = authenticate_user(form_data.username, form_data.password, db)
|
||||
except Exception as exc:
|
||||
if isinstance(exc, HTTPException):
|
||||
raise exc
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=str(exc),
|
||||
) from exc
|
||||
|
||||
if user:
|
||||
return create_user_tokens(user_id=user.id, db=db, update_last_login=True)
|
||||
else:
|
||||
raise HTTPException(
|
||||
|
|
@ -35,9 +45,9 @@ async def login_to_get_access_token(
|
|||
|
||||
@router.get("/auto_login")
|
||||
async def auto_login(
|
||||
db: Session = Depends(get_session), settings_manager=Depends(get_settings_manager)
|
||||
db: Session = Depends(get_session), settings_service=Depends(get_settings_service)
|
||||
):
|
||||
if settings_manager.auth_settings.AUTO_LOGIN:
|
||||
if settings_service.auth_settings.AUTO_LOGIN:
|
||||
return create_user_longterm_token(db)
|
||||
|
||||
raise HTTPException(
|
||||
|
|
|
|||
|
|
@ -47,11 +47,30 @@ class UpdateTemplateRequest(BaseModel):
|
|||
template: dict
|
||||
|
||||
|
||||
class TaskResponse(BaseModel):
|
||||
"""Task response schema."""
|
||||
|
||||
id: Optional[str] = Field(None)
|
||||
href: Optional[str] = Field(None)
|
||||
|
||||
|
||||
class ProcessResponse(BaseModel):
|
||||
"""Process response schema."""
|
||||
|
||||
result: dict
|
||||
result: Any
|
||||
task: Optional[TaskResponse] = None
|
||||
session_id: Optional[str] = None
|
||||
backend: Optional[str] = None
|
||||
|
||||
|
||||
# TaskStatusResponse(
|
||||
# status=task.status, result=task.result if task.ready() else None
|
||||
# )
|
||||
class TaskStatusResponse(BaseModel):
|
||||
"""Task status response schema."""
|
||||
|
||||
status: str
|
||||
result: Optional[Any] = None
|
||||
|
||||
|
||||
class ChatMessage(BaseModel):
|
||||
|
|
@ -59,6 +78,7 @@ class ChatMessage(BaseModel):
|
|||
|
||||
is_bot: bool = False
|
||||
message: Union[str, None, dict] = None
|
||||
chatKey: Optional[str] = None
|
||||
type: str = "human"
|
||||
|
||||
|
||||
|
|
@ -66,6 +86,7 @@ class ChatResponse(ChatMessage):
|
|||
"""Chat response schema."""
|
||||
|
||||
intermediate_steps: str
|
||||
|
||||
type: str
|
||||
is_bot: bool = True
|
||||
files: list = []
|
||||
|
|
@ -77,6 +98,14 @@ class ChatResponse(ChatMessage):
|
|||
return v
|
||||
|
||||
|
||||
class PromptResponse(ChatMessage):
|
||||
"""Prompt response schema."""
|
||||
|
||||
prompt: str
|
||||
type: str = "prompt"
|
||||
is_bot: bool = True
|
||||
|
||||
|
||||
class FileResponse(ChatMessage):
|
||||
"""File response schema."""
|
||||
|
||||
|
|
|
|||
|
|
@ -13,23 +13,26 @@ from sqlalchemy.exc import IntegrityError
|
|||
from sqlmodel import Session, select
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.getters import get_session, get_settings_service
|
||||
from langflow.services.auth.utils import (
|
||||
get_current_active_superuser,
|
||||
get_current_active_user,
|
||||
get_password_hash,
|
||||
verify_password,
|
||||
)
|
||||
from langflow.services.database.models.user.crud import (
|
||||
get_user_by_id,
|
||||
update_user,
|
||||
)
|
||||
|
||||
router = APIRouter(tags=["Users"])
|
||||
router = APIRouter(tags=["Users"], prefix="/users")
|
||||
|
||||
|
||||
@router.post("/user", response_model=UserRead, status_code=201)
|
||||
@router.post("/", response_model=UserRead, status_code=201)
|
||||
def add_user(
|
||||
user: UserCreate,
|
||||
session: Session = Depends(get_session),
|
||||
settings_service=Depends(get_settings_service),
|
||||
) -> User:
|
||||
"""
|
||||
Add a new user to the database.
|
||||
|
|
@ -37,7 +40,7 @@ def add_user(
|
|||
new_user = User.from_orm(user)
|
||||
try:
|
||||
new_user.password = get_password_hash(user.password)
|
||||
|
||||
new_user.is_active = settings_service.auth_settings.NEW_USER_IS_ACTIVE
|
||||
session.add(new_user)
|
||||
session.commit()
|
||||
session.refresh(new_user)
|
||||
|
|
@ -50,7 +53,7 @@ def add_user(
|
|||
return new_user
|
||||
|
||||
|
||||
@router.get("/user", response_model=UserRead)
|
||||
@router.get("/whoami", response_model=UserRead)
|
||||
def read_current_user(
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
) -> User:
|
||||
|
|
@ -60,11 +63,11 @@ def read_current_user(
|
|||
return current_user
|
||||
|
||||
|
||||
@router.get("/users", response_model=UsersResponse)
|
||||
@router.get("/", response_model=UsersResponse)
|
||||
def read_all_users(
|
||||
skip: int = 0,
|
||||
limit: int = 10,
|
||||
current_user: Session = Depends(get_current_active_superuser),
|
||||
_: Session = Depends(get_current_active_superuser),
|
||||
session: Session = Depends(get_session),
|
||||
) -> UsersResponse:
|
||||
"""
|
||||
|
|
@ -82,20 +85,63 @@ def read_all_users(
|
|||
)
|
||||
|
||||
|
||||
@router.patch("/user/{user_id}", response_model=UserRead)
|
||||
@router.patch("/{user_id}", response_model=UserRead)
|
||||
def patch_user(
|
||||
user_id: UUID,
|
||||
user: UserUpdate,
|
||||
_: Session = Depends(get_current_active_user),
|
||||
user_update: UserUpdate,
|
||||
user: User = Depends(get_current_active_user),
|
||||
session: Session = Depends(get_session),
|
||||
) -> User:
|
||||
"""
|
||||
Update an existing user's data.
|
||||
"""
|
||||
return update_user(user_id, user, session)
|
||||
if not user.is_superuser and user.id != user_id:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="You don't have the permission to update this user"
|
||||
)
|
||||
if user_update.password:
|
||||
if not user.is_superuser:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="You can't change your password here"
|
||||
)
|
||||
user_update.password = get_password_hash(user_update.password)
|
||||
|
||||
if user_db := get_user_by_id(session, user_id):
|
||||
return update_user(user_db, user_update, session)
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
|
||||
@router.delete("/user/{user_id}")
|
||||
@router.patch("/{user_id}/reset-password", response_model=UserRead)
|
||||
def reset_password(
|
||||
user_id: UUID,
|
||||
user_update: UserUpdate,
|
||||
user: User = Depends(get_current_active_user),
|
||||
session: Session = Depends(get_session),
|
||||
) -> User:
|
||||
"""
|
||||
Reset a user's password.
|
||||
"""
|
||||
if user_id != user.id:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="You can't change another user's password"
|
||||
)
|
||||
|
||||
if not user:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
if verify_password(user_update.password, user.password):
|
||||
raise HTTPException(
|
||||
status_code=400, detail="You can't use your current password"
|
||||
)
|
||||
new_password = get_password_hash(user_update.password)
|
||||
user.password = new_password
|
||||
session.commit()
|
||||
session.refresh(user)
|
||||
|
||||
return user
|
||||
|
||||
|
||||
@router.delete("/{user_id}", response_model=dict)
|
||||
def delete_user(
|
||||
user_id: UUID,
|
||||
current_user: User = Depends(get_current_active_superuser),
|
||||
|
|
@ -121,31 +167,3 @@ def delete_user(
|
|||
session.commit()
|
||||
|
||||
return {"detail": "User deleted"}
|
||||
|
||||
|
||||
# TODO: REMOVE - Just for testing purposes
|
||||
@router.post("/super_user", response_model=User)
|
||||
def add_super_user_for_testing_purposes_delete_me_before_merge_into_dev(
|
||||
session: Session = Depends(get_session),
|
||||
) -> User:
|
||||
"""
|
||||
Add a superuser for testing purposes.
|
||||
(This should be removed in production)
|
||||
"""
|
||||
new_user = User(
|
||||
username="superuser",
|
||||
password=get_password_hash("12345"),
|
||||
is_active=True,
|
||||
is_superuser=True,
|
||||
last_login_at=None,
|
||||
)
|
||||
|
||||
try:
|
||||
session.add(new_user)
|
||||
session.commit()
|
||||
session.refresh(new_user)
|
||||
except IntegrityError as e:
|
||||
session.rollback()
|
||||
raise HTTPException(status_code=400, detail="User exists") from e
|
||||
|
||||
return new_user
|
||||
|
|
|
|||
|
|
@ -58,6 +58,16 @@ def post_validate_prompt(prompt_request: ValidatePromptRequest):
|
|||
|
||||
def get_old_custom_fields(prompt_request):
|
||||
try:
|
||||
if (
|
||||
len(prompt_request.frontend_node.custom_fields) == 1
|
||||
and prompt_request.name == ""
|
||||
):
|
||||
# If there is only one custom field and the name is empty string
|
||||
# then we are dealing with the first prompt request after the node was created
|
||||
prompt_request.name = list(
|
||||
prompt_request.frontend_node.custom_fields.keys()
|
||||
)[0]
|
||||
|
||||
old_custom_fields = prompt_request.frontend_node.custom_fields[
|
||||
prompt_request.name
|
||||
].copy()
|
||||
|
|
|
|||
|
|
@ -42,8 +42,8 @@ class ConversationalAgent(CustomComponent):
|
|||
self,
|
||||
model_name: str,
|
||||
openai_api_key: str,
|
||||
openai_api_base: str,
|
||||
tools: Tool,
|
||||
openai_api_base: Optional[str] = None,
|
||||
memory: Optional[BaseMemory] = None,
|
||||
system_message: Optional[SystemMessagePromptTemplate] = None,
|
||||
max_token_limit: int = 2000,
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from langflow import CustomComponent
|
||||
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain import PromptTemplate
|
||||
from langchain.prompts import PromptTemplate
|
||||
from langchain.schema import Document
|
||||
|
||||
|
||||
|
|
@ -16,17 +16,14 @@ class PromptRunner(CustomComponent):
|
|||
"info": "Make sure the prompt has all variables filled.",
|
||||
},
|
||||
"code": {"show": False},
|
||||
"inputs": {"field_type": "code"},
|
||||
}
|
||||
|
||||
def build(
|
||||
self,
|
||||
llm: BaseLLM,
|
||||
prompt: PromptTemplate,
|
||||
self, llm: BaseLLM, prompt: PromptTemplate, inputs: dict = {}
|
||||
) -> Document:
|
||||
chain = prompt | llm
|
||||
# The input is an empty dict because the prompt is already filled
|
||||
result = chain.invoke({})
|
||||
result = chain.invoke(input=inputs)
|
||||
if hasattr(result, "content"):
|
||||
result = result.content
|
||||
self.repr_value = result
|
||||
|
|
|
|||
42
src/backend/langflow/components/llms/HuggingFaceEndpoints.py
Normal file
42
src/backend/langflow/components/llms/HuggingFaceEndpoints.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
from typing import Optional
|
||||
from langflow import CustomComponent
|
||||
from langchain.llms import HuggingFaceEndpoint
|
||||
from langchain.llms.base import BaseLLM
|
||||
|
||||
|
||||
class HuggingFaceEndpointsComponent(CustomComponent):
|
||||
display_name: str = "Hugging Face Inference API"
|
||||
description: str = "LLM model from Hugging Face Inference API."
|
||||
|
||||
def build_config(self):
|
||||
return {
|
||||
"endpoint_url": {"display_name": "Endpoint URL", "password": True},
|
||||
"task": {
|
||||
"display_name": "Task",
|
||||
"type": "select",
|
||||
"options": ["text2text-generation", "text-generation", "summarization"],
|
||||
},
|
||||
"huggingfacehub_api_token": {"display_name": "API token", "password": True},
|
||||
"model_kwargs": {
|
||||
"display_name": "Model Keyword Arguments",
|
||||
"field_type": "code",
|
||||
},
|
||||
"code": {"show": False},
|
||||
}
|
||||
|
||||
def build(
|
||||
self,
|
||||
endpoint_url: str,
|
||||
task="text2text-generation",
|
||||
huggingfacehub_api_token: Optional[str] = None,
|
||||
model_kwargs: Optional[dict] = None,
|
||||
) -> BaseLLM:
|
||||
try:
|
||||
output = HuggingFaceEndpoint(
|
||||
endpoint_url=endpoint_url,
|
||||
task=task,
|
||||
huggingfacehub_api_token=huggingfacehub_api_token,
|
||||
)
|
||||
except Exception as e:
|
||||
raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
|
||||
return output
|
||||
0
src/backend/langflow/components/llms/__init__.py
Normal file
0
src/backend/langflow/components/llms/__init__.py
Normal file
28
src/backend/langflow/components/retrievers/MetalRetriever.py
Normal file
28
src/backend/langflow/components/retrievers/MetalRetriever.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
from typing import Optional
|
||||
from langflow import CustomComponent
|
||||
from langchain.retrievers import MetalRetriever
|
||||
from langchain.schema import BaseRetriever
|
||||
from metal_sdk.metal import Metal # type: ignore
|
||||
|
||||
|
||||
class MetalRetrieverComponent(CustomComponent):
|
||||
display_name: str = "Metal Retriever"
|
||||
description: str = "Retriever that uses the Metal API."
|
||||
|
||||
def build_config(self):
|
||||
return {
|
||||
"api_key": {"display_name": "API Key", "password": True},
|
||||
"client_id": {"display_name": "Client ID", "password": True},
|
||||
"index_id": {"display_name": "Index ID"},
|
||||
"params": {"display_name": "Parameters"},
|
||||
"code": {"show": False},
|
||||
}
|
||||
|
||||
def build(
|
||||
self, api_key: str, client_id: str, index_id: str, params: Optional[dict] = None
|
||||
) -> BaseRetriever:
|
||||
try:
|
||||
metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id)
|
||||
except Exception as e:
|
||||
raise ValueError("Could not connect to Metal API.") from e
|
||||
return MetalRetriever(client=metal, params=params or {})
|
||||
0
src/backend/langflow/components/retrievers/__init__.py
Normal file
0
src/backend/langflow/components/retrievers/__init__.py
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
from typing import Optional
|
||||
from langflow import CustomComponent
|
||||
from langchain.text_splitter import Language
|
||||
from langchain.schema import Document
|
||||
|
||||
|
||||
class LanguageRecursiveTextSplitterComponent(CustomComponent):
|
||||
display_name: str = "Language Recursive Text Splitter"
|
||||
description: str = "Split text into chunks of a specified length based on language."
|
||||
documentation: str = "https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter"
|
||||
|
||||
def build_config(self):
|
||||
options = [x.value for x in Language]
|
||||
return {
|
||||
"documents": {
|
||||
"display_name": "Documents",
|
||||
"info": "The documents to split.",
|
||||
},
|
||||
"separator_type": {
|
||||
"display_name": "Separator Type",
|
||||
"info": "The type of separator to use.",
|
||||
"field_type": "str",
|
||||
"options": options,
|
||||
"value": "Python",
|
||||
},
|
||||
"separators": {
|
||||
"display_name": "Separators",
|
||||
"info": "The characters to split on.",
|
||||
"is_list": True,
|
||||
},
|
||||
"chunk_size": {
|
||||
"display_name": "Chunk Size",
|
||||
"info": "The maximum length of each chunk.",
|
||||
"field_type": "int",
|
||||
"value": 1000,
|
||||
},
|
||||
"chunk_overlap": {
|
||||
"display_name": "Chunk Overlap",
|
||||
"info": "The amount of overlap between chunks.",
|
||||
"field_type": "int",
|
||||
"value": 200,
|
||||
},
|
||||
"code": {"show": False},
|
||||
}
|
||||
|
||||
def build(
|
||||
self,
|
||||
documents: list[Document],
|
||||
chunk_size: Optional[int] = 1000,
|
||||
chunk_overlap: Optional[int] = 200,
|
||||
separator_type: Optional[str] = "Python",
|
||||
) -> list[Document]:
|
||||
"""
|
||||
Split text into chunks of a specified length.
|
||||
|
||||
Args:
|
||||
separators (list[str]): The characters to split on.
|
||||
chunk_size (int): The maximum length of each chunk.
|
||||
chunk_overlap (int): The amount of overlap between chunks.
|
||||
length_function (function): The function to use to calculate the length of the text.
|
||||
|
||||
Returns:
|
||||
list[str]: The chunks of text.
|
||||
"""
|
||||
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
||||
|
||||
# Make sure chunk_size and chunk_overlap are ints
|
||||
if isinstance(chunk_size, str):
|
||||
chunk_size = int(chunk_size)
|
||||
if isinstance(chunk_overlap, str):
|
||||
chunk_overlap = int(chunk_overlap)
|
||||
|
||||
splitter = RecursiveCharacterTextSplitter.from_language(
|
||||
language=Language(separator_type),
|
||||
chunk_size=chunk_size,
|
||||
chunk_overlap=chunk_overlap,
|
||||
)
|
||||
|
||||
docs = splitter.split_documents(documents)
|
||||
return docs
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
from typing import Optional
|
||||
from langflow import CustomComponent
|
||||
from langchain.schema import Document
|
||||
from langflow.utils.util import build_loader_repr_from_documents
|
||||
|
||||
|
||||
class RecursiveCharacterTextSplitterComponent(CustomComponent):
|
||||
display_name: str = "Recursive Character Text Splitter"
|
||||
description: str = "Split text into chunks of a specified length."
|
||||
documentation: str = "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter"
|
||||
|
||||
def build_config(self):
|
||||
return {
|
||||
"documents": {
|
||||
"display_name": "Documents",
|
||||
"info": "The documents to split.",
|
||||
},
|
||||
"separators": {
|
||||
"display_name": "Separators",
|
||||
"info": 'The characters to split on.\nIf left empty defaults to ["\\n\\n", "\\n", " ", ""].',
|
||||
"is_list": True,
|
||||
},
|
||||
"chunk_size": {
|
||||
"display_name": "Chunk Size",
|
||||
"info": "The maximum length of each chunk.",
|
||||
"field_type": "int",
|
||||
"value": 1000,
|
||||
},
|
||||
"chunk_overlap": {
|
||||
"display_name": "Chunk Overlap",
|
||||
"info": "The amount of overlap between chunks.",
|
||||
"field_type": "int",
|
||||
"value": 200,
|
||||
},
|
||||
"code": {"show": False},
|
||||
}
|
||||
|
||||
def build(
|
||||
self,
|
||||
documents: list[Document],
|
||||
separators: Optional[list[str]] = None,
|
||||
chunk_size: Optional[int] = 1000,
|
||||
chunk_overlap: Optional[int] = 200,
|
||||
) -> list[Document]:
|
||||
"""
|
||||
Split text into chunks of a specified length.
|
||||
|
||||
Args:
|
||||
separators (list[str]): The characters to split on.
|
||||
chunk_size (int): The maximum length of each chunk.
|
||||
chunk_overlap (int): The amount of overlap between chunks.
|
||||
length_function (function): The function to use to calculate the length of the text.
|
||||
|
||||
Returns:
|
||||
list[str]: The chunks of text.
|
||||
"""
|
||||
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
||||
|
||||
if separators == "":
|
||||
separators = None
|
||||
elif separators:
|
||||
# check if the separators list has escaped characters
|
||||
# if there are escaped characters, unescape them
|
||||
separators = [x.encode().decode("unicode-escape") for x in separators]
|
||||
|
||||
# Make sure chunk_size and chunk_overlap are ints
|
||||
if isinstance(chunk_size, str):
|
||||
chunk_size = int(chunk_size)
|
||||
if isinstance(chunk_overlap, str):
|
||||
chunk_overlap = int(chunk_overlap)
|
||||
splitter = RecursiveCharacterTextSplitter(
|
||||
separators=separators,
|
||||
chunk_size=chunk_size,
|
||||
chunk_overlap=chunk_overlap,
|
||||
)
|
||||
|
||||
docs = splitter.split_documents(documents)
|
||||
self.repr_value = build_loader_repr_from_documents(docs)
|
||||
return docs
|
||||
|
|
@ -19,7 +19,6 @@ class GetRequest(CustomComponent):
|
|||
},
|
||||
"headers": {
|
||||
"display_name": "Headers",
|
||||
"field_type": "code",
|
||||
"info": "The headers to send with the request.",
|
||||
},
|
||||
"code": {"show": False},
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@ class PostRequest(CustomComponent):
|
|||
"url": {"display_name": "URL", "info": "The URL to make the request to."},
|
||||
"headers": {
|
||||
"display_name": "Headers",
|
||||
"field_type": "code",
|
||||
"info": "The headers to send with the request.",
|
||||
},
|
||||
"code": {"show": False},
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ class UpdateRequest(CustomComponent):
|
|||
"url": {"display_name": "URL", "info": "The URL to make the request to."},
|
||||
"headers": {
|
||||
"display_name": "Headers",
|
||||
"field_type": "code",
|
||||
"field_type": "NestedDict",
|
||||
"info": "The headers to send with the request.",
|
||||
},
|
||||
"code": {"show": False},
|
||||
|
|
|
|||
|
|
@ -171,8 +171,6 @@ prompts:
|
|||
textsplitters:
|
||||
CharacterTextSplitter:
|
||||
documentation: "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter"
|
||||
RecursiveCharacterTextSplitter:
|
||||
documentation: "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/recursive_text_splitter"
|
||||
toolkits:
|
||||
OpenAPIToolkit:
|
||||
documentation: ""
|
||||
|
|
|
|||
0
src/backend/langflow/core/__init__.py
Normal file
0
src/backend/langflow/core/__init__.py
Normal file
11
src/backend/langflow/core/celery_app.py
Normal file
11
src/backend/langflow/core/celery_app.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
from celery import Celery # type: ignore
|
||||
|
||||
|
||||
def make_celery(app_name: str, config: str) -> Celery:
|
||||
celery_app = Celery(app_name)
|
||||
celery_app.config_from_object(config)
|
||||
celery_app.conf.task_routes = {"langflow.worker.tasks.*": {"queue": "langflow"}}
|
||||
return celery_app
|
||||
|
||||
|
||||
celery_app = make_celery("langflow", "langflow.core.celeryconfig")
|
||||
14
src/backend/langflow/core/celeryconfig.py
Normal file
14
src/backend/langflow/core/celeryconfig.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# celeryconfig.py
|
||||
import os
|
||||
|
||||
langflow_redis_host = os.environ.get("LANGFLOW_REDIS_HOST")
|
||||
langflow_redis_port = os.environ.get("LANGFLOW_REDIS_PORT")
|
||||
if "BROKER_URL" in os.environ and "RESULT_BACKEND" in os.environ:
|
||||
# RabbitMQ
|
||||
broker_url = os.environ.get("BROKER_URL", "amqp://localhost")
|
||||
result_backend = os.environ.get("RESULT_BACKEND", "redis://localhost:6379/0")
|
||||
elif langflow_redis_host and langflow_redis_port:
|
||||
broker_url = f"redis://{langflow_redis_host}:{langflow_redis_port}/0"
|
||||
result_backend = f"redis://{langflow_redis_host}:{langflow_redis_port}/0"
|
||||
# tasks should be json or pickle
|
||||
accept_content = ["json", "pickle"]
|
||||
3
src/backend/langflow/field_typing/__init__.py
Normal file
3
src/backend/langflow/field_typing/__init__.py
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
from .base import NestedDict
|
||||
|
||||
__all__ = ["NestedDict"]
|
||||
4
src/backend/langflow/field_typing/base.py
Normal file
4
src/backend/langflow/field_typing/base.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
from typing import Union, Dict
|
||||
|
||||
# Type alias for more complex dicts
|
||||
NestedDict = Dict[str, Union[str, Dict]]
|
||||
|
|
@ -68,6 +68,17 @@ class Edge:
|
|||
f"has invalid handles"
|
||||
)
|
||||
|
||||
def __setstate__(self, state):
|
||||
self.source = state["source"]
|
||||
self.target = state["target"]
|
||||
self.target_param = state["target_param"]
|
||||
self.source_handle = state["source_handle"]
|
||||
self.target_handle = state["target_handle"]
|
||||
|
||||
def reset(self) -> None:
|
||||
self.source._build_params()
|
||||
self.target._build_params()
|
||||
|
||||
def validate_edge(self) -> None:
|
||||
# Validate that the outputs of the source node are valid inputs
|
||||
# for the target node
|
||||
|
|
|
|||
|
|
@ -32,6 +32,12 @@ class Graph:
|
|||
self._edges = self._graph_data["edges"]
|
||||
self._build_graph()
|
||||
|
||||
def __setstate__(self, state):
|
||||
self.__dict__.update(state)
|
||||
for edge in self.edges:
|
||||
edge.reset()
|
||||
edge.validate_edge()
|
||||
|
||||
@classmethod
|
||||
def from_payload(cls, payload: Dict) -> "Graph":
|
||||
"""
|
||||
|
|
@ -55,6 +61,11 @@ class Graph:
|
|||
f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}"
|
||||
) from exc
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, Graph):
|
||||
return False
|
||||
return self.__repr__() == other.__repr__()
|
||||
|
||||
def _build_graph(self) -> None:
|
||||
"""Builds the graph from the nodes and edges."""
|
||||
self.nodes = self._build_vertices()
|
||||
|
|
@ -154,7 +165,7 @@ class Graph:
|
|||
def generator_build(self) -> Generator[Vertex, None, None]:
|
||||
"""Builds each vertex in the graph and yields it."""
|
||||
sorted_vertices = self.topological_sort()
|
||||
logger.debug("Sorted vertices: %s", sorted_vertices)
|
||||
logger.debug("There are %s vertices in the graph", len(sorted_vertices))
|
||||
yield from sorted_vertices
|
||||
|
||||
def get_node_neighbors(self, node: Vertex) -> Dict[Vertex, int]:
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
import ast
|
||||
import pickle
|
||||
from langflow.graph.utils import UnbuiltObject
|
||||
from langflow.graph.vertex.utils import is_basic_type
|
||||
from langflow.interface.initialize import loading
|
||||
from langflow.interface.listing import lazy_load_dict
|
||||
from langflow.utils.constants import DIRECT_TYPES
|
||||
|
|
@ -12,12 +14,19 @@ import types
|
|||
from typing import Any, Dict, List, Optional
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.graph.edge.base import Edge
|
||||
|
||||
|
||||
class Vertex:
|
||||
def __init__(self, data: Dict, base_type: Optional[str] = None) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
data: Dict,
|
||||
base_type: Optional[str] = None,
|
||||
is_task: bool = False,
|
||||
params: Optional[Dict] = None,
|
||||
) -> None:
|
||||
self.id: str = data["id"]
|
||||
self._data = data
|
||||
self.edges: List["Edge"] = []
|
||||
|
|
@ -26,6 +35,59 @@ class Vertex:
|
|||
self._built_object = UnbuiltObject()
|
||||
self._built = False
|
||||
self.artifacts: Dict[str, Any] = {}
|
||||
self.task_id: Optional[str] = None
|
||||
self.is_task = is_task
|
||||
self.params = params or {}
|
||||
|
||||
def reset_params(self):
|
||||
for edge in self.edges:
|
||||
if edge.source != self:
|
||||
target_param = edge.target_param
|
||||
if target_param in ["document", "texts"]:
|
||||
# this means they got data and have already ingested it
|
||||
# so we continue after removing the param
|
||||
self.params.pop(target_param, None)
|
||||
continue
|
||||
|
||||
if target_param in self.params and not is_basic_type(
|
||||
self.params[target_param]
|
||||
):
|
||||
# edge.source.params = {}
|
||||
edge.source._build_params()
|
||||
edge.source._built_object = UnbuiltObject()
|
||||
edge.source._built = False
|
||||
|
||||
self.params[target_param] = edge.source
|
||||
|
||||
def __getstate__(self):
|
||||
state_dict = self.__dict__.copy()
|
||||
try:
|
||||
# try pickling the built object
|
||||
# if it fails, then we need to delete it
|
||||
# and build it again
|
||||
pickle.dumps(state_dict["_built_object"])
|
||||
except Exception:
|
||||
self.reset_params()
|
||||
del state_dict["_built_object"]
|
||||
del state_dict["_built"]
|
||||
return state_dict
|
||||
|
||||
def __setstate__(self, state):
|
||||
self._data = state["_data"]
|
||||
self.params = state["params"]
|
||||
self.base_type = state["base_type"]
|
||||
self.is_task = state["is_task"]
|
||||
self.edges = state["edges"]
|
||||
self.id = state["id"]
|
||||
self._parse_data()
|
||||
if "_built_object" in state:
|
||||
self._built_object = state["_built_object"]
|
||||
self._built = state["_built"]
|
||||
else:
|
||||
self._built_object = UnbuiltObject()
|
||||
self._built = False
|
||||
self.artifacts: Dict[str, Any] = {}
|
||||
self.task_id: Optional[str] = None
|
||||
self.parent_node_id: Optional[str] = self._data.get("parent_node_id")
|
||||
self.parent_is_top_level = False
|
||||
|
||||
|
|
@ -73,6 +135,13 @@ class Vertex:
|
|||
self.base_type = base_type
|
||||
break
|
||||
|
||||
def get_task(self):
|
||||
# using the task_id, get the task from celery
|
||||
# and return it
|
||||
from celery.result import AsyncResult # type: ignore
|
||||
|
||||
return AsyncResult(self.task_id)
|
||||
|
||||
def _build_params(self):
|
||||
# sourcery skip: merge-list-append, remove-redundant-if
|
||||
# Some params are required, some are optional
|
||||
|
|
@ -94,9 +163,11 @@ class Vertex:
|
|||
for key, value in self.data["node"]["template"].items()
|
||||
if isinstance(value, dict)
|
||||
}
|
||||
params = {}
|
||||
params = self.params.copy() if self.params else {}
|
||||
|
||||
for edge in self.edges:
|
||||
if not hasattr(edge, "target_param"):
|
||||
continue
|
||||
param_key = edge.target_param
|
||||
if param_key in template_dict:
|
||||
if template_dict[param_key]["list"]:
|
||||
|
|
@ -107,6 +178,8 @@ class Vertex:
|
|||
params[param_key] = edge.source
|
||||
|
||||
for key, value in template_dict.items():
|
||||
if key in params:
|
||||
continue
|
||||
# Skip _type and any value that has show == False and is not code
|
||||
# If we don't want to show code but we want to use it
|
||||
if key == "_type" or (not value.get("show") and key != "code"):
|
||||
|
|
@ -117,9 +190,10 @@ class Vertex:
|
|||
# Load the type in value.get('suffixes') using
|
||||
# what is inside value.get('content')
|
||||
# value.get('value') is the file name
|
||||
file_path = value.get("file_path")
|
||||
|
||||
params[key] = file_path
|
||||
if file_path := value.get("file_path"):
|
||||
params[key] = file_path
|
||||
else:
|
||||
raise ValueError(f"File path not found for {self.vertex_type}")
|
||||
elif value.get("type") in DIRECT_TYPES and params.get(key) is None:
|
||||
if value.get("type") == "code":
|
||||
try:
|
||||
|
|
@ -127,6 +201,19 @@ class Vertex:
|
|||
except Exception as exc:
|
||||
logger.debug(f"Error parsing code: {exc}")
|
||||
params[key] = value.get("value")
|
||||
elif value.get("type") in ["dict", "NestedDict"]:
|
||||
# When dict comes from the frontend it comes as a
|
||||
# list of dicts, so we need to convert it to a dict
|
||||
# before passing it to the build method
|
||||
_value = value.get("value")
|
||||
if isinstance(_value, list):
|
||||
params[key] = {
|
||||
k: v
|
||||
for item in value.get("value", [])
|
||||
for k, v in item.items()
|
||||
}
|
||||
elif isinstance(_value, dict):
|
||||
params[key] = _value
|
||||
else:
|
||||
params[key] = value.get("value")
|
||||
|
||||
|
|
@ -136,6 +223,7 @@ class Vertex:
|
|||
else:
|
||||
params.pop(key, None)
|
||||
# Add _type to params
|
||||
self._raw_params = params
|
||||
self.params = params
|
||||
|
||||
def _build(self, user_id=None):
|
||||
|
|
@ -143,13 +231,13 @@ class Vertex:
|
|||
Initiate the build process.
|
||||
"""
|
||||
logger.debug(f"Building {self.vertex_type}")
|
||||
self._build_each_node_in_params_dict()
|
||||
self._build_each_node_in_params_dict(user_id)
|
||||
self._get_and_instantiate_class(user_id)
|
||||
self._validate_built_object()
|
||||
|
||||
self._built = True
|
||||
|
||||
def _build_each_node_in_params_dict(self):
|
||||
def _build_each_node_in_params_dict(self, user_id=None):
|
||||
"""
|
||||
Iterates over each node in the params dictionary and builds it.
|
||||
"""
|
||||
|
|
@ -158,9 +246,9 @@ class Vertex:
|
|||
if value == self:
|
||||
del self.params[key]
|
||||
continue
|
||||
self._build_node_and_update_params(key, value)
|
||||
self._build_node_and_update_params(key, value, user_id)
|
||||
elif isinstance(value, list) and self._is_list_of_nodes(value):
|
||||
self._build_list_of_nodes_and_update_params(key, value)
|
||||
self._build_list_of_nodes_and_update_params(key, value, user_id)
|
||||
|
||||
def _is_node(self, value):
|
||||
"""
|
||||
|
|
@ -174,11 +262,31 @@ class Vertex:
|
|||
"""
|
||||
return all(self._is_node(node) for node in value)
|
||||
|
||||
def get_result(self, user_id=None, timeout=None) -> Any:
|
||||
# Check if the Vertex was built already
|
||||
if self._built:
|
||||
return self._built_object
|
||||
|
||||
if self.is_task and self.task_id is not None:
|
||||
task = self.get_task()
|
||||
result = task.get(timeout=timeout)
|
||||
if result is not None: # If result is ready
|
||||
self._update_built_object_and_artifacts(result)
|
||||
return self._built_object
|
||||
else:
|
||||
# Handle the case when the result is not ready (retry, throw exception, etc.)
|
||||
pass
|
||||
|
||||
# If there's no task_id, build the vertex locally
|
||||
self.build(user_id)
|
||||
return self._built_object
|
||||
|
||||
def _build_node_and_update_params(self, key, node, user_id=None):
|
||||
"""
|
||||
Builds a given node and updates the params dictionary accordingly.
|
||||
"""
|
||||
result = node.build(user_id)
|
||||
|
||||
result = node.get_result(user_id)
|
||||
self._handle_func(key, result)
|
||||
if isinstance(result, list):
|
||||
self._extend_params_list_with_result(key, result)
|
||||
|
|
@ -192,7 +300,7 @@ class Vertex:
|
|||
"""
|
||||
self.params[key] = []
|
||||
for node in nodes:
|
||||
built = node.build(user_id)
|
||||
built = node.get_result(user_id)
|
||||
if isinstance(built, list):
|
||||
if key not in self.params:
|
||||
self.params[key] = []
|
||||
|
|
@ -237,6 +345,7 @@ class Vertex:
|
|||
)
|
||||
self._update_built_object_and_artifacts(result)
|
||||
except Exception as exc:
|
||||
logger.exception(exc)
|
||||
raise ValueError(
|
||||
f"Error building node {self.vertex_type}: {str(exc)}"
|
||||
) from exc
|
||||
|
|
@ -277,7 +386,10 @@ class Vertex:
|
|||
return f"Vertex(id={self.id}, data={self.data})"
|
||||
|
||||
def __eq__(self, __o: object) -> bool:
|
||||
return self.id == __o.id if isinstance(__o, Vertex) else False
|
||||
try:
|
||||
return self.id == __o.id if isinstance(__o, Vertex) else False
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return id(self)
|
||||
|
|
|
|||
|
|
@ -7,14 +7,27 @@ from langflow.interface.utils import extract_input_variables_from_prompt
|
|||
|
||||
|
||||
class AgentVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="agents")
|
||||
def __init__(self, data: Dict, params: Optional[Dict] = None):
|
||||
super().__init__(data, base_type="agents", params=params)
|
||||
|
||||
self.tools: List[Union[ToolkitVertex, ToolVertex]] = []
|
||||
self.chains: List[ChainVertex] = []
|
||||
|
||||
def __getstate__(self):
|
||||
state = super().__getstate__()
|
||||
state["tools"] = self.tools
|
||||
state["chains"] = self.chains
|
||||
return state
|
||||
|
||||
def __setstate__(self, state):
|
||||
self.tools = state["tools"]
|
||||
self.chains = state["chains"]
|
||||
super().__setstate__(state)
|
||||
|
||||
def _set_tools_and_chains(self) -> None:
|
||||
for edge in self.edges:
|
||||
if not hasattr(edge, "source"):
|
||||
continue
|
||||
source_node = edge.source
|
||||
if isinstance(source_node, (ToolVertex, ToolkitVertex)):
|
||||
self.tools.append(source_node)
|
||||
|
|
@ -38,16 +51,16 @@ class AgentVertex(Vertex):
|
|||
|
||||
|
||||
class ToolVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="tools")
|
||||
def __init__(self, data: Dict, params: Optional[Dict] = None):
|
||||
super().__init__(data, base_type="tools", params=params)
|
||||
|
||||
|
||||
class LLMVertex(Vertex):
|
||||
built_node_type = None
|
||||
class_built_object = None
|
||||
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="llms")
|
||||
def __init__(self, data: Dict, params: Optional[Dict] = None):
|
||||
super().__init__(data, base_type="llms", params=params)
|
||||
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
# LLM is different because some models might take up too much memory
|
||||
|
|
@ -64,13 +77,13 @@ class LLMVertex(Vertex):
|
|||
|
||||
|
||||
class ToolkitVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="toolkits")
|
||||
def __init__(self, data: Dict, params=None):
|
||||
super().__init__(data, base_type="toolkits", params=params)
|
||||
|
||||
|
||||
class FileToolVertex(ToolVertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data)
|
||||
def __init__(self, data: Dict, params=None):
|
||||
super().__init__(data, params=params)
|
||||
|
||||
|
||||
class WrapperVertex(Vertex):
|
||||
|
|
@ -86,17 +99,19 @@ class WrapperVertex(Vertex):
|
|||
|
||||
|
||||
class DocumentLoaderVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="documentloaders")
|
||||
def __init__(self, data: Dict, params: Optional[Dict] = None):
|
||||
super().__init__(data, base_type="documentloaders", params=params)
|
||||
|
||||
def _built_object_repr(self):
|
||||
# This built_object is a list of documents. Maybe we should
|
||||
# show how many documents are in the list?
|
||||
|
||||
if self._built_object:
|
||||
avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(
|
||||
self._built_object
|
||||
)
|
||||
avg_length = sum(
|
||||
len(doc.page_content)
|
||||
for doc in self._built_object
|
||||
if hasattr(doc, "page_content")
|
||||
) / len(self._built_object)
|
||||
return f"""{self.vertex_type}({len(self._built_object)} documents)
|
||||
\nAvg. Document Length (characters): {int(avg_length)}
|
||||
Documents: {self._built_object[:3]}..."""
|
||||
|
|
@ -104,14 +119,51 @@ class DocumentLoaderVertex(Vertex):
|
|||
|
||||
|
||||
class EmbeddingVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="embeddings")
|
||||
def __init__(self, data: Dict, params: Optional[Dict] = None):
|
||||
super().__init__(data, base_type="embeddings", params=params)
|
||||
|
||||
|
||||
class VectorStoreVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
def __init__(self, data: Dict, params=None):
|
||||
super().__init__(data, base_type="vectorstores")
|
||||
|
||||
self.params = params or {}
|
||||
|
||||
# VectorStores may contain databse connections
|
||||
# so we need to define the __reduce__ method and the __setstate__ method
|
||||
# to avoid pickling errors
|
||||
def clean_edges_for_pickling(self):
|
||||
# for each edge that has self as source
|
||||
# we need to clear the _built_object of the target
|
||||
# so that we don't try to pickle a database connection
|
||||
for edge in self.edges:
|
||||
if edge.source == self:
|
||||
edge.target._built_object = None
|
||||
edge.target._built = False
|
||||
edge.target.params[edge.target_param] = self
|
||||
|
||||
def remove_docs_and_texts_from_params(self):
|
||||
# remove documents and texts from params
|
||||
# so that we don't try to pickle a database connection
|
||||
self.params.pop("documents", None)
|
||||
self.params.pop("texts", None)
|
||||
|
||||
def __getstate__(self):
|
||||
# We want to save the params attribute
|
||||
# and if "documents" or "texts" are in the params
|
||||
# we want to remove them because they have already
|
||||
# been processed.
|
||||
params = self.params.copy()
|
||||
params.pop("documents", None)
|
||||
params.pop("texts", None)
|
||||
self.clean_edges_for_pickling()
|
||||
|
||||
return super().__getstate__()
|
||||
|
||||
def __setstate__(self, state):
|
||||
super().__setstate__(state)
|
||||
self.remove_docs_and_texts_from_params()
|
||||
|
||||
|
||||
class MemoryVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
|
|
@ -124,8 +176,8 @@ class RetrieverVertex(Vertex):
|
|||
|
||||
|
||||
class TextSplitterVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="textsplitters")
|
||||
def __init__(self, data: Dict, params: Optional[Dict] = None):
|
||||
super().__init__(data, base_type="textsplitters", params=params)
|
||||
|
||||
def _built_object_repr(self):
|
||||
# This built_object is a list of documents. Maybe we should
|
||||
|
|
@ -211,7 +263,7 @@ class PromptVertex(Vertex):
|
|||
self.params["input_variables"] = list(
|
||||
set(self.params["input_variables"])
|
||||
)
|
||||
else:
|
||||
elif isinstance(self.params, dict):
|
||||
self.params.pop("input_variables", None)
|
||||
|
||||
self._build(user_id=user_id)
|
||||
|
|
@ -258,8 +310,13 @@ class OutputParserVertex(Vertex):
|
|||
|
||||
class CustomComponentVertex(Vertex):
|
||||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="custom_components")
|
||||
super().__init__(data, base_type="custom_components", is_task=True)
|
||||
|
||||
def _built_object_repr(self):
|
||||
if self.task_id and self.is_task:
|
||||
if task := self.get_task():
|
||||
return str(task.info)
|
||||
else:
|
||||
return f"Task {self.task_id} is not running"
|
||||
if self.artifacts and "repr" in self.artifacts:
|
||||
return self.artifacts["repr"] or super()._built_object_repr()
|
||||
|
|
|
|||
5
src/backend/langflow/graph/vertex/utils.py
Normal file
5
src/backend/langflow/graph/vertex/utils.py
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
from langflow.utils.constants import PYTHON_BASIC_TYPES
|
||||
|
||||
|
||||
def is_basic_type(obj):
|
||||
return type(obj) in PYTHON_BASIC_TYPES
|
||||
|
|
@ -5,7 +5,7 @@ from langchain.agents import types
|
|||
from langflow.custom.customs import get_custom_nodes
|
||||
from langflow.interface.agents.custom import CUSTOM_AGENTS
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
from langflow.template.frontend_node.agents import AgentFrontendNode
|
||||
from loguru import logger
|
||||
|
|
@ -54,7 +54,7 @@ class AgentCreator(LangChainTypeCreator):
|
|||
# Now this is a generator
|
||||
def to_list(self) -> List[str]:
|
||||
names = []
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
for _, agent in self.type_to_loader_dict.items():
|
||||
agent_name = (
|
||||
agent.function_name()
|
||||
|
|
@ -62,8 +62,8 @@ class AgentCreator(LangChainTypeCreator):
|
|||
else agent.__name__
|
||||
)
|
||||
if (
|
||||
agent_name in settings_manager.settings.AGENTS
|
||||
or settings_manager.settings.DEV
|
||||
agent_name in settings_service.settings.AGENTS
|
||||
or settings_service.settings.DEV
|
||||
):
|
||||
names.append(agent_name)
|
||||
return names
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from typing import Any, List, Optional
|
||||
|
||||
from langchain import LLMChain
|
||||
from langchain.chains.llm import LLMChain
|
||||
from langchain.agents import (
|
||||
AgentExecutor,
|
||||
Tool,
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from langchain import LLMChain
|
||||
from langchain.chains.llm import LLMChain
|
||||
from langchain.agents import AgentExecutor, ZeroShotAgent
|
||||
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
|
||||
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
|
|||
from typing import Any, Dict, List, Optional, Type, Union
|
||||
from langchain.chains.base import Chain
|
||||
from langchain.agents import AgentExecutor
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
from pydantic import BaseModel
|
||||
|
||||
from langflow.template.field.base import TemplateField
|
||||
|
|
@ -27,11 +27,11 @@ class LangChainTypeCreator(BaseModel, ABC):
|
|||
@property
|
||||
def docs_map(self) -> Dict[str, str]:
|
||||
"""A dict with the name of the component as key and the documentation link as value."""
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
if self.name_docs_dict is None:
|
||||
try:
|
||||
type_settings = getattr(
|
||||
settings_manager.settings, self.type_name.upper()
|
||||
settings_service.settings, self.type_name.upper()
|
||||
)
|
||||
self.name_docs_dict = {
|
||||
name: value_dict["documentation"]
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ from typing import Any, Dict, List, Optional, Type
|
|||
from langflow.custom.customs import get_custom_nodes
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.interface.importing.utils import import_class
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
from langflow.template.frontend_node.chains import ChainFrontendNode
|
||||
from loguru import logger
|
||||
|
|
@ -31,7 +31,7 @@ class ChainCreator(LangChainTypeCreator):
|
|||
@property
|
||||
def type_to_loader_dict(self) -> Dict:
|
||||
if self.type_dict is None:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
self.type_dict: dict[str, Any] = {
|
||||
chain_name: import_class(f"langchain.chains.{chain_name}")
|
||||
for chain_name in chains.__all__
|
||||
|
|
@ -45,8 +45,8 @@ class ChainCreator(LangChainTypeCreator):
|
|||
self.type_dict = {
|
||||
name: chain
|
||||
for name, chain in self.type_dict.items()
|
||||
if name in settings_manager.settings.CHAINS
|
||||
or settings_manager.settings.DEV
|
||||
if name in settings_service.settings.CHAINS
|
||||
or settings_service.settings.DEV
|
||||
}
|
||||
return self.type_dict
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from langchain import PromptTemplate
|
||||
from langchain.prompts import PromptTemplate
|
||||
from langchain.chains.base import Chain
|
||||
from langchain.document_loaders.base import BaseLoader
|
||||
from langchain.embeddings.base import Embeddings
|
||||
from langchain.schema.embeddings import Embeddings
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain.schema import BaseRetriever, Document
|
||||
from langchain.text_splitter import TextSplitter
|
||||
|
|
@ -45,7 +45,7 @@ DEFAULT_CUSTOM_COMPONENT_CODE = """from langflow import CustomComponent
|
|||
|
||||
from langchain.llms.base import BaseLLM
|
||||
from langchain.chains import LLMChain
|
||||
from langchain import PromptTemplate
|
||||
from langchain.prompts import PromptTemplate
|
||||
from langchain.schema import Document
|
||||
|
||||
import requests
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ from fastapi import HTTPException
|
|||
from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
|
||||
from langflow.interface.custom.component import Component
|
||||
from langflow.interface.custom.directory_reader import DirectoryReader
|
||||
from langflow.services.utils import get_db_manager
|
||||
from langflow.services.getters import get_db_service
|
||||
from langflow.interface.custom.utils import extract_inner_type
|
||||
|
||||
from langflow.utils import validate
|
||||
|
|
@ -95,7 +95,20 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
|
||||
build_method = build_methods[0]
|
||||
|
||||
return build_method["args"]
|
||||
args = build_method["args"]
|
||||
for arg in args:
|
||||
if arg.get("type") == "prompt":
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={
|
||||
"error": "Type hint Error",
|
||||
"traceback": (
|
||||
"Prompt type is not supported in the build method."
|
||||
" Try using PromptTemplate instead."
|
||||
),
|
||||
},
|
||||
)
|
||||
return args
|
||||
|
||||
@property
|
||||
def get_function_entrypoint_return_type(self) -> List[str]:
|
||||
|
|
@ -176,25 +189,25 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
return validate.create_function(self.code, self.function_entrypoint_name)
|
||||
|
||||
def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> Any:
|
||||
from langflow.processing.process import build_sorted_vertices_with_caching
|
||||
from langflow.processing.process import build_sorted_vertices
|
||||
from langflow.processing.process import process_tweaks
|
||||
|
||||
db_manager = get_db_manager()
|
||||
with session_getter(db_manager) as session:
|
||||
db_service = get_db_service()
|
||||
with session_getter(db_service) as session:
|
||||
graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None
|
||||
if not graph_data:
|
||||
raise ValueError(f"Flow {flow_id} not found")
|
||||
if tweaks:
|
||||
graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks)
|
||||
return build_sorted_vertices_with_caching(graph_data)
|
||||
return build_sorted_vertices(graph_data)
|
||||
|
||||
def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]:
|
||||
if not self.user_id:
|
||||
raise ValueError("Session is invalid")
|
||||
try:
|
||||
get_session = get_session or session_getter
|
||||
db_manager = get_db_manager()
|
||||
with get_session(db_manager) as session:
|
||||
db_service = get_db_service()
|
||||
with get_session(db_service) as session:
|
||||
flows = session.query(Flow).filter(Flow.user_id == self.user_id).all()
|
||||
return flows
|
||||
except Exception as e:
|
||||
|
|
@ -209,8 +222,8 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
get_session: Optional[Callable] = None,
|
||||
) -> Flow:
|
||||
get_session = get_session or session_getter
|
||||
db_manager = get_db_manager()
|
||||
with get_session(db_manager) as session:
|
||||
db_service = get_db_service()
|
||||
with get_session(db_service) as session:
|
||||
if flow_id:
|
||||
flow = session.query(Flow).get(flow_id)
|
||||
elif flow_name:
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
from typing import Dict, List, Optional, Type
|
||||
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode
|
||||
from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
|
||||
|
||||
|
|
@ -31,12 +31,12 @@ class DocumentLoaderCreator(LangChainTypeCreator):
|
|||
return None
|
||||
|
||||
def to_list(self) -> List[str]:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
return [
|
||||
documentloader.__name__
|
||||
for documentloader in self.type_to_loader_dict.values()
|
||||
if documentloader.__name__ in settings_manager.settings.DOCUMENTLOADERS
|
||||
or settings_manager.settings.DEV
|
||||
if documentloader.__name__ in settings_service.settings.DOCUMENTLOADERS
|
||||
or settings_service.settings.DEV
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ from typing import Dict, List, Optional, Type
|
|||
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.interface.custom_lists import embedding_type_to_cls_dict
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
from langflow.template.frontend_node.base import FrontendNode
|
||||
from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode
|
||||
|
|
@ -33,12 +33,12 @@ class EmbeddingCreator(LangChainTypeCreator):
|
|||
return None
|
||||
|
||||
def to_list(self) -> List[str]:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
return [
|
||||
embedding.__name__
|
||||
for embedding in self.type_to_loader_dict.values()
|
||||
if embedding.__name__ in settings_manager.settings.EMBEDDINGS
|
||||
or settings_manager.settings.DEV
|
||||
if embedding.__name__ in settings_service.settings.EMBEDDINGS
|
||||
or settings_service.settings.DEV
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
import importlib
|
||||
from typing import Any, Type
|
||||
|
||||
from langchain import PromptTemplate
|
||||
from langchain.prompts import PromptTemplate
|
||||
from langchain.agents import Agent
|
||||
from langchain.base_language import BaseLanguageModel
|
||||
from langchain.chains.base import Chain
|
||||
|
|
@ -144,6 +144,8 @@ def import_chain(chain: str) -> Type[Chain]:
|
|||
|
||||
if chain in CUSTOM_CHAINS:
|
||||
return CUSTOM_CHAINS[chain]
|
||||
if chain == "SQLDatabaseChain":
|
||||
return import_class("langchain_experimental.sql.SQLDatabaseChain")
|
||||
return import_class(f"langchain.chains.{chain}")
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import json
|
||||
import orjson
|
||||
from typing import Any, Callable, Dict, Sequence, Type, TYPE_CHECKING
|
||||
|
||||
from langchain.schema import Document
|
||||
from langchain.agents import agent as agent_module
|
||||
from langchain.agents.agent import AgentExecutor
|
||||
from langchain.agents.agent_toolkits.base import BaseToolkit
|
||||
|
|
@ -40,12 +40,23 @@ if TYPE_CHECKING:
|
|||
from langflow import CustomComponent
|
||||
|
||||
|
||||
def build_vertex_in_params(params: Dict) -> Dict:
|
||||
from langflow.graph.vertex.base import Vertex
|
||||
|
||||
# If any of the values in params is a Vertex, we will build it
|
||||
return {
|
||||
key: value.build() if isinstance(value, Vertex) else value
|
||||
for key, value in params.items()
|
||||
}
|
||||
|
||||
|
||||
def instantiate_class(
|
||||
node_type: str, base_type: str, params: Dict, user_id=None
|
||||
) -> Any:
|
||||
"""Instantiate class from module type and key, and params"""
|
||||
params = convert_params_to_sets(params)
|
||||
params = convert_kwargs(params)
|
||||
|
||||
if node_type in CUSTOM_NODES:
|
||||
if custom_node := CUSTOM_NODES.get(node_type):
|
||||
if hasattr(custom_node, "initialize"):
|
||||
|
|
@ -100,7 +111,7 @@ def instantiate_based_on_type(class_object, base_type, node_type, params, user_i
|
|||
elif base_type == "vectorstores":
|
||||
return instantiate_vectorstore(class_object, params)
|
||||
elif base_type == "documentloaders":
|
||||
return instantiate_documentloader(class_object, params)
|
||||
return instantiate_documentloader(node_type, class_object, params)
|
||||
elif base_type == "textsplitters":
|
||||
return instantiate_textsplitter(class_object, params)
|
||||
elif base_type == "utilities":
|
||||
|
|
@ -289,6 +300,13 @@ def instantiate_embedding(node_type, class_object, params: Dict):
|
|||
|
||||
def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict):
|
||||
search_kwargs = params.pop("search_kwargs", {})
|
||||
# clean up docs or texts to have only documents
|
||||
if "texts" in params:
|
||||
params["documents"] = params.pop("texts")
|
||||
if "documents" in params:
|
||||
params["documents"] = [
|
||||
doc for doc in params["documents"] if isinstance(doc, Document)
|
||||
]
|
||||
if initializer := vecstore_initializer.get(class_object.__name__):
|
||||
vecstore = initializer(class_object, params)
|
||||
else:
|
||||
|
|
@ -303,7 +321,9 @@ def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict):
|
|||
return vecstore
|
||||
|
||||
|
||||
def instantiate_documentloader(class_object: Type[BaseLoader], params: Dict):
|
||||
def instantiate_documentloader(
|
||||
node_type: str, class_object: Type[BaseLoader], params: Dict
|
||||
):
|
||||
if "file_filter" in params:
|
||||
# file_filter will be a string but we need a function
|
||||
# that will be used to filter the files using file_filter
|
||||
|
|
@ -323,6 +343,11 @@ def instantiate_documentloader(class_object: Type[BaseLoader], params: Dict):
|
|||
raise ValueError(
|
||||
"The metadata you provided is not a valid JSON string."
|
||||
) from exc
|
||||
|
||||
if node_type == "WebBaseLoader":
|
||||
if web_path := params.pop("web_path", None):
|
||||
params["web_paths"] = [web_path]
|
||||
|
||||
docs = class_object(**params).load()
|
||||
# Now if metadata is an empty dict, we will not add it to the documents
|
||||
if metadata:
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ from langchain.vectorstores import (
|
|||
SupabaseVectorStore,
|
||||
MongoDBAtlasVectorSearch,
|
||||
)
|
||||
|
||||
from langchain.schema import Document
|
||||
import os
|
||||
|
||||
import orjson
|
||||
|
|
@ -201,11 +201,16 @@ def initialize_chroma(class_object: Type[Chroma], params: dict):
|
|||
if "texts" in params:
|
||||
params["documents"] = params.pop("texts")
|
||||
for doc in params["documents"]:
|
||||
if not isinstance(doc, Document):
|
||||
# remove any non-Document objects from the list
|
||||
params["documents"].remove(doc)
|
||||
continue
|
||||
if doc.metadata is None:
|
||||
doc.metadata = {}
|
||||
for key, value in doc.metadata.items():
|
||||
if value is None:
|
||||
doc.metadata[key] = ""
|
||||
|
||||
chromadb = class_object.from_documents(**params)
|
||||
if persist:
|
||||
chromadb.persist()
|
||||
|
|
|
|||
|
|
@ -1,19 +1,4 @@
|
|||
from langflow.interface.agents.base import agent_creator
|
||||
from langflow.interface.chains.base import chain_creator
|
||||
from langflow.interface.document_loaders.base import documentloader_creator
|
||||
from langflow.interface.embeddings.base import embedding_creator
|
||||
from langflow.interface.llms.base import llm_creator
|
||||
from langflow.interface.memories.base import memory_creator
|
||||
from langflow.interface.prompts.base import prompt_creator
|
||||
from langflow.interface.text_splitters.base import textsplitter_creator
|
||||
from langflow.interface.toolkits.base import toolkits_creator
|
||||
from langflow.interface.tools.base import tool_creator
|
||||
from langflow.interface.utilities.base import utility_creator
|
||||
from langflow.interface.vector_store.base import vectorstore_creator
|
||||
from langflow.interface.wrappers.base import wrapper_creator
|
||||
from langflow.interface.output_parsers.base import output_parser_creator
|
||||
from langflow.interface.retrievers.base import retriever_creator
|
||||
from langflow.interface.custom.base import custom_component_creator
|
||||
from langflow.services.getters import get_settings_service
|
||||
from langflow.utils.lazy_load import LazyLoadDictBase
|
||||
|
||||
|
||||
|
|
@ -33,24 +18,10 @@ class AllTypesDict(LazyLoadDictBase):
|
|||
}
|
||||
|
||||
def get_type_dict(self):
|
||||
return {
|
||||
"agents": agent_creator.to_list(),
|
||||
"prompts": prompt_creator.to_list(),
|
||||
"llms": llm_creator.to_list(),
|
||||
"tools": tool_creator.to_list(),
|
||||
"chains": chain_creator.to_list(),
|
||||
"memory": memory_creator.to_list(),
|
||||
"toolkits": toolkits_creator.to_list(),
|
||||
"wrappers": wrapper_creator.to_list(),
|
||||
"documentLoaders": documentloader_creator.to_list(),
|
||||
"vectorStore": vectorstore_creator.to_list(),
|
||||
"embeddings": embedding_creator.to_list(),
|
||||
"textSplitters": textsplitter_creator.to_list(),
|
||||
"utilities": utility_creator.to_list(),
|
||||
"outputParsers": output_parser_creator.to_list(),
|
||||
"retrievers": retriever_creator.to_list(),
|
||||
"custom_components": custom_component_creator.to_list(),
|
||||
}
|
||||
from langflow.interface.types import get_all_types_dict
|
||||
|
||||
settings_service = get_settings_service()
|
||||
return get_all_types_dict(settings_service=settings_service)
|
||||
|
||||
|
||||
lazy_load_dict = AllTypesDict()
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ from typing import Dict, List, Optional, Type
|
|||
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.interface.custom_lists import llm_type_to_cls_dict
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
from langflow.template.frontend_node.llms import LLMFrontendNode
|
||||
from loguru import logger
|
||||
|
|
@ -34,12 +34,12 @@ class LLMCreator(LangChainTypeCreator):
|
|||
return None
|
||||
|
||||
def to_list(self) -> List[str]:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
return [
|
||||
llm.__name__
|
||||
for llm in self.type_to_loader_dict.values()
|
||||
if llm.__name__ in settings_manager.settings.LLMS
|
||||
or settings_manager.settings.DEV
|
||||
if llm.__name__ in settings_service.settings.LLMS
|
||||
or settings_service.settings.DEV
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ from typing import Dict, List, Optional, Type
|
|||
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.interface.custom_lists import memory_type_to_cls_dict
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
from langflow.template.frontend_node.base import FrontendNode
|
||||
from langflow.template.frontend_node.memories import MemoryFrontendNode
|
||||
|
|
@ -49,12 +49,12 @@ class MemoryCreator(LangChainTypeCreator):
|
|||
return None
|
||||
|
||||
def to_list(self) -> List[str]:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
return [
|
||||
memory.__name__
|
||||
for memory in self.type_to_loader_dict.values()
|
||||
if memory.__name__ in settings_manager.settings.MEMORIES
|
||||
or settings_manager.settings.DEV
|
||||
if memory.__name__ in settings_service.settings.MEMORIES
|
||||
or settings_service.settings.DEV
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ from langchain import output_parsers
|
|||
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.interface.importing.utils import import_class
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode
|
||||
from loguru import logger
|
||||
|
|
@ -24,7 +24,7 @@ class OutputParserCreator(LangChainTypeCreator):
|
|||
@property
|
||||
def type_to_loader_dict(self) -> Dict:
|
||||
if self.type_dict is None:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
self.type_dict = {
|
||||
output_parser_name: import_class(
|
||||
f"langchain.output_parsers.{output_parser_name}"
|
||||
|
|
@ -35,8 +35,8 @@ class OutputParserCreator(LangChainTypeCreator):
|
|||
self.type_dict = {
|
||||
name: output_parser
|
||||
for name, output_parser in self.type_dict.items()
|
||||
if name in settings_manager.settings.OUTPUT_PARSERS
|
||||
or settings_manager.settings.DEV
|
||||
if name in settings_service.settings.OUTPUT_PARSERS
|
||||
or settings_service.settings.DEV
|
||||
}
|
||||
return self.type_dict
|
||||
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from langchain import prompts
|
|||
from langflow.custom.customs import get_custom_nodes
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.interface.importing.utils import import_class
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
from langflow.template.frontend_node.prompts import PromptFrontendNode
|
||||
from loguru import logger
|
||||
|
|
@ -21,7 +21,7 @@ class PromptCreator(LangChainTypeCreator):
|
|||
|
||||
@property
|
||||
def type_to_loader_dict(self) -> Dict:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
if self.type_dict is None:
|
||||
self.type_dict = {
|
||||
prompt_name: import_class(f"langchain.prompts.{prompt_name}")
|
||||
|
|
@ -36,8 +36,8 @@ class PromptCreator(LangChainTypeCreator):
|
|||
self.type_dict = {
|
||||
name: prompt
|
||||
for name, prompt in self.type_dict.items()
|
||||
if name in settings_manager.settings.PROMPTS
|
||||
or settings_manager.settings.DEV
|
||||
if name in settings_service.settings.PROMPTS
|
||||
or settings_service.settings.DEV
|
||||
}
|
||||
return self.type_dict
|
||||
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ from langchain import retrievers
|
|||
|
||||
from langflow.interface.base import LangChainTypeCreator
|
||||
from langflow.interface.importing.utils import import_class
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.services.getters import get_settings_service
|
||||
|
||||
from langflow.template.frontend_node.retrievers import RetrieverFrontendNode
|
||||
from loguru import logger
|
||||
|
|
@ -49,12 +49,12 @@ class RetrieverCreator(LangChainTypeCreator):
|
|||
return None
|
||||
|
||||
def to_list(self) -> List[str]:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_service = get_settings_service()
|
||||
return [
|
||||
retriever
|
||||
for retriever in self.type_to_loader_dict.keys()
|
||||
if retriever in settings_manager.settings.RETRIEVERS
|
||||
or settings_manager.settings.DEV
|
||||
if retriever in settings_service.settings.RETRIEVERS
|
||||
or settings_service.settings.DEV
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,22 +1,9 @@
|
|||
from typing import Any, Dict, Tuple
|
||||
from langflow.services.cache.utils import memoize_dict
|
||||
from typing import Dict, Tuple
|
||||
from langflow.graph import Graph
|
||||
from loguru import logger
|
||||
|
||||
|
||||
@memoize_dict(maxsize=10)
|
||||
def build_langchain_object_with_caching(data_graph):
|
||||
"""
|
||||
Build langchain object from data_graph.
|
||||
"""
|
||||
|
||||
logger.debug("Building langchain object")
|
||||
graph = Graph.from_payload(data_graph)
|
||||
return graph.build()
|
||||
|
||||
|
||||
@memoize_dict(maxsize=10)
|
||||
def build_sorted_vertices_with_caching(data_graph) -> Tuple[Any, Dict]:
|
||||
def build_sorted_vertices(data_graph) -> Tuple[Graph, Dict]:
|
||||
"""
|
||||
Build langchain object from data_graph.
|
||||
"""
|
||||
|
|
@ -29,7 +16,7 @@ def build_sorted_vertices_with_caching(data_graph) -> Tuple[Any, Dict]:
|
|||
vertex.build()
|
||||
if vertex.artifacts:
|
||||
artifacts.update(vertex.artifacts)
|
||||
return graph.build(), artifacts
|
||||
return graph, artifacts
|
||||
|
||||
|
||||
def build_langchain_object(data_graph):
|
||||
|
|
@ -58,8 +45,12 @@ def get_memory_key(langchain_object):
|
|||
"chat_history": "history",
|
||||
"history": "chat_history",
|
||||
}
|
||||
memory_key = langchain_object.memory.memory_key
|
||||
return mem_key_dict.get(memory_key)
|
||||
# Check if memory_key attribute exists
|
||||
if hasattr(langchain_object.memory, "memory_key"):
|
||||
memory_key = langchain_object.memory.memory_key
|
||||
return mem_key_dict.get(memory_key)
|
||||
else:
|
||||
return None # or some other default value or action
|
||||
|
||||
|
||||
def update_memory_keys(langchain_object, possible_new_mem_key):
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue