diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
new file mode 100644
index 000000000..52f1bd1ee
--- /dev/null
+++ b/.github/workflows/docker-build.yml
@@ -0,0 +1,61 @@
+name: Docker Build and Push
+on:
+  workflow_call:
+    inputs:
+      version:
+        required: true
+        type: string
+      release_type:
+        required: true
+        type: string
+    # Declare the secrets this reusable workflow needs so callers must
+    # provide them explicitly or via `secrets: inherit`.
+    secrets:
+      DOCKERHUB_USERNAME:
+        required: true
+      DOCKERHUB_TOKEN:
+        required: true
+  workflow_dispatch:
+    inputs:
+      version:
+        required: true
+        type: string
+      release_type:
+        required: true
+        type: choice
+        options:
+          - base
+          - main
+
+jobs:
+  docker_build:
+    name: Build Docker Image
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Set Dockerfile and Tags
+        id: set-vars
+        run: |
+          if [ "${{ inputs.release_type }}" == "base" ]; then
+            echo "DOCKERFILE=./docker/build_and_push_base.Dockerfile" >> "$GITHUB_ENV"
+            echo "TAGS=langflowai/langflow:base-${{ inputs.version }}" >> "$GITHUB_ENV"
+          else
+            echo "DOCKERFILE=./docker/build_and_push.Dockerfile" >> "$GITHUB_ENV"
+            echo "TAGS=langflowai/langflow:${{ inputs.version }},langflowai/langflow:1.0-alpha" >> "$GITHUB_ENV"
+          fi
+      - name: Build and push
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: true
+          file: ${{ env.DOCKERFILE }}
+          tags: ${{ env.TAGS }}
diff --git a/.github/workflows/pre-release-base.yml b/.github/workflows/pre-release-base.yml
index cbcfe3ad6..d087fc183 100644
--- a/.github/workflows/pre-release-base.yml
+++ b/.github/workflows/pre-release-base.yml
@@ -72,6 +72,6 @@ jobs:
with:
context: .
push: true
- file: ./build_and_push_base.Dockerfile
+ file: ./docker/build_and_push_base.Dockerfile
tags: |
langflowai/langflow:base-${{ needs.release.outputs.version }}
diff --git a/.github/workflows/pre-release-langflow.yml b/.github/workflows/pre-release-langflow.yml
index 5dcd69617..82cb580f3 100644
--- a/.github/workflows/pre-release-langflow.yml
+++ b/.github/workflows/pre-release-langflow.yml
@@ -78,7 +78,7 @@ jobs:
with:
context: .
push: true
- file: ./build_and_push.Dockerfile
+ file: ./docker/build_and_push.Dockerfile
tags: |
langflowai/langflow:${{ needs.release.outputs.version }}
langflowai/langflow:1.0-alpha
diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml
new file mode 100644
index 000000000..edd515f01
--- /dev/null
+++ b/.github/workflows/pre-release.yml
@@ -0,0 +1,109 @@
+name: Langflow Pre-release (Unified)
+run-name: Langflow (${{ inputs.release_type }}) Pre-release by @${{ github.actor }}
+on:
+  workflow_dispatch:
+    inputs:
+      release_package:
+        description: "Release package"
+        required: true
+        type: boolean
+        default: false
+      release_type:
+        description: "Type of release (base or main)"
+        required: true
+        type: choice
+        options:
+          - base
+          - main
+
+env:
+  POETRY_VERSION: "1.8.2"
+
+jobs:
+  release:
+    name: Release Langflow
+    if: inputs.release_package == true
+    runs-on: ubuntu-latest
+    outputs:
+      version: ${{ steps.check-version.outputs.version }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install poetry
+        run: pipx install poetry==$POETRY_VERSION
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+          cache: "poetry"
+      - name: Check Version
+        id: check-version
+        run: |
+          if [ "${{ inputs.release_type }}" == "base" ]; then
+            version=$(cd src/backend/base && poetry version --short)
+            last_released_version=$(curl -s "https://pypi.org/pypi/langflow-base/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
+          else
+            version=$(poetry version --short)
+            last_released_version=$(curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
+          fi
+          if [ "$version" = "$last_released_version" ]; then
+            echo "Version $version is already released. Skipping release."
+            exit 1
+          else
+            echo "version=$version" >> "$GITHUB_OUTPUT"
+          fi
+      - name: Build project for distribution
+        run: |
+          if [ "${{ inputs.release_type }}" == "base" ]; then
+            make build base=true
+          else
+            make build main=true
+          fi
+      # create_release downloads a "dist" artifact; upload it here or that
+      # job fails with a missing-artifact error.
+      - name: Upload dist artifacts
+        if: ${{ inputs.release_type == 'main' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: dist
+          path: dist
+      - name: Publish to PyPI
+        env:
+          POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
+        run: |
+          if [ "${{ inputs.release_type }}" == "base" ]; then
+            make publish base=true
+          else
+            make publish main=true
+          fi
+
+  call_docker_build:
+    name: Call Docker Build Workflow
+    needs: release
+    # A reusable workflow is invoked at the job level with `uses`;
+    # it cannot be referenced as a step inside `steps`.
+    uses: ./.github/workflows/docker-build.yml
+    with:
+      version: ${{ needs.release.outputs.version }}
+      release_type: ${{ inputs.release_type }}
+    secrets: inherit
+
+  create_release:
+    name: Create Release
+    runs-on: ubuntu-latest
+    needs: [call_docker_build, release]
+    if: ${{ inputs.release_type == 'main' }}
+    steps:
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist
+          path: dist
+      - name: Create Release
+        uses: ncipollo/release-action@v1
+        with:
+          artifacts: "dist/*"
+          token: ${{ secrets.GITHUB_TOKEN }}
+          draft: false
+          generateReleaseNotes: true
+          prerelease: true
+          tag: v${{ needs.release.outputs.version }}
+          commit: dev
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index c19a491c3..06df72e9f 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -50,7 +50,7 @@ jobs:
with:
context: .
push: true
- file: ./build_and_push.Dockerfile
+ file: ./docker/build_and_push.Dockerfile
tags: |
langflowai/langflow:${{ steps.check-version.outputs.version }}
langflowai/langflow:latest
diff --git a/.github/workflows/typescript_test.yml b/.github/workflows/typescript_test.yml
index dc5bc061e..be081bec6 100644
--- a/.github/workflows/typescript_test.yml
+++ b/.github/workflows/typescript_test.yml
@@ -19,8 +19,8 @@ jobs:
strategy:
fail-fast: false
matrix:
- shardIndex: [1]
- shardTotal: [1]
+ shardIndex: [1, 2, 3, 4]
+ shardTotal: [4]
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -30,7 +30,15 @@ jobs:
id: setup-node
with:
node-version: ${{ env.NODE_VERSION }}
- cache: "npm"
+
+ - name: Cache Node.js dependencies
+ uses: actions/cache@v4
+ id: npm-cache
+ with:
+ path: ~/.npm
+ key: ${{ runner.os }}-node-${{ hashFiles('src/frontend/package-lock.json') }}
+ restore-keys: |
+ ${{ runner.os }}-node-
- name: Install Node.js dependencies
run: |
@@ -80,7 +88,7 @@ jobs:
- name: Run Playwright Tests
run: |
cd src/frontend
- npx playwright test
+ npx playwright test --shard ${{ matrix.shardIndex }}/${{ matrix.shardTotal }} --workers 2
- name: Upload blob report to GitHub Actions Artifacts
if: always()
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 60505515a..f07a219d8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -18,7 +18,10 @@ repos:
hooks:
- id: check-case-conflict
- id: end-of-file-fixer
+ # python, js and ts only
+ files: \.(py|js|ts)$
- id: mixed-line-ending
+ files: \.(py|js|ts)$
args:
- --fix=lf
- id: trailing-whitespace
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
deleted file mode 100644
index 82dfe1f85..000000000
--- a/.readthedocs.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-# Read the Docs configuration file for Sphinx projects
-# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
-
-# Required
-version: 2
-
-# Set the OS, Python version and other tools you might need
-build:
- os: ubuntu-22.04
- tools:
- python: "3.11"
- # You can also specify other tool versions:
- # nodejs: "19"
- # rust: "1.64"
- # golang: "1.19"
-
-# Build documentation in the "docs/" directory with Sphinx
-sphinx:
- configuration: docs/conf.py
-
-# Optionally build your docs in additional formats such as PDF and ePub
-# formats:
-# - pdf
-# - epub
-
-# Optional but recommended, declare the Python requirements required
-# to build your documentation
-# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
-# python:
-# install:
-# - requirements: docs/requirements.txt
\ No newline at end of file
diff --git a/README.md b/README.md
index a753ea830..626a472dd 100644
--- a/README.md
+++ b/README.md
@@ -65,7 +65,6 @@ Each option is detailed below:
- `--workers`: Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`.
- `--timeout`: Sets the worker timeout in seconds. The default is `60`.
- `--port`: Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`.
-- `--config`: Defines the path to the configuration file. The default is `config.yaml`.
- `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`.
- `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`.
- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`.
diff --git a/base.Dockerfile b/base.Dockerfile
deleted file mode 100644
index 2325ece79..000000000
--- a/base.Dockerfile
+++ /dev/null
@@ -1,99 +0,0 @@
-
-
-# syntax=docker/dockerfile:1
-# Keep this syntax directive! It's used to enable Docker BuildKit
-
-# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865
-# but I try to keep it updated (see history)
-
-################################
-# PYTHON-BASE
-# Sets up all our shared environment variables
-################################
-FROM python:3.10-slim as python-base
-
-# python
-ENV PYTHONUNBUFFERED=1 \
- # prevents python creating .pyc files
- PYTHONDONTWRITEBYTECODE=1 \
- \
- # pip
- PIP_DISABLE_PIP_VERSION_CHECK=on \
- PIP_DEFAULT_TIMEOUT=100 \
- \
- # poetry
- # https://python-poetry.org/docs/configuration/#using-environment-variables
- POETRY_VERSION=1.8.2 \
- # make poetry install to this location
- POETRY_HOME="/opt/poetry" \
- # make poetry create the virtual environment in the project's root
- # it gets named `.venv`
- POETRY_VIRTUALENVS_IN_PROJECT=true \
- # do not ask any interactive question
- POETRY_NO_INTERACTION=1 \
- \
- # paths
- # this is where our requirements + virtual environment will live
- PYSETUP_PATH="/opt/pysetup" \
- VENV_PATH="/opt/pysetup/.venv"
-
-
-# prepend poetry and venv to path
-ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
-
-
-################################
-# BUILDER-BASE
-# Used to build deps + create our virtual environment
-################################
-FROM python-base as builder-base
-RUN apt-get update \
- && apt-get install --no-install-recommends -y \
- # deps for installing poetry
- curl \
- # deps for building python deps
- build-essential
-
-
-# install poetry - respects $POETRY_VERSION & $POETRY_HOME
-# The --mount will mount the buildx cache directory to where
-# Poetry and Pip store their cache so that they can reuse it
-RUN --mount=type=cache,target=/root/.cache \
- curl -sSL https://install.python-poetry.org | python3 -
-
-# copy project requirement files here to ensure they will be cached.
-WORKDIR $PYSETUP_PATH
-# Copy just one file to avoid rebuilding the whole image
-COPY poetry.lock pyproject.toml ./
-COPY ./src/backend/langflow ./src/backend/langflow
-COPY ./src/backend/base/pyproject.toml ./src/backend/base/pyproject.toml
-# Copy README.md to the build context
-COPY README.md .
-# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally
-RUN --mount=type=cache,target=/root/.cache \
- poetry install --without dev --extras deploy
-
-
-################################
-# DEVELOPMENT
-# Image used during development / testing
-################################
-FROM python-base as development
-WORKDIR $PYSETUP_PATH
-
-# copy in our built poetry + venv
-COPY --from=builder-base $POETRY_HOME $POETRY_HOME
-COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH
-
-# Copy just one file to avoid rebuilding the whole image
-COPY ./src/backend/langflow ./src/backend/langflow
-# quicker install as runtime deps are already installed
-RUN --mount=type=cache,target=/root/.cache \
- poetry install --with=dev --extras deploy
-
-# copy in our app code
-COPY ./src/backend ./src/backend
-RUN --mount=type=cache,target=/root/.cache \
- poetry install --with=dev --extras deploy
-COPY ./tests ./tests=
-
diff --git a/docker-compose.debug.yml b/docker-compose.debug.yml
deleted file mode 100644
index f81faf8d4..000000000
--- a/docker-compose.debug.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-version: "3.4"
-
-services:
- backend:
- volumes:
- - ./:/app
- build:
- context: ./
- dockerfile: ./dev.Dockerfile
- command:
- [
- "sh",
- "-c",
- "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --loop asyncio",
- ]
- ports:
- - 7860:7860
- - 5678:5678
- restart: on-failure
-
- frontend:
- build:
- context: ./src/frontend
- dockerfile: ./dev.Dockerfile
- args:
- - BACKEND_URL=http://backend:7860
- ports:
- - "3000:3000"
- volumes:
- - ./src/frontend/public:/home/node/app/public
- - ./src/frontend/src:/home/node/app/src
- - ./src/frontend/package.json:/home/node/app/package.json
- restart: on-failure
diff --git a/.dockerignore b/docker/.dockerignore
similarity index 100%
rename from .dockerignore
rename to docker/.dockerignore
diff --git a/build_and_push.Dockerfile b/docker/build_and_push.Dockerfile
similarity index 94%
rename from build_and_push.Dockerfile
rename to docker/build_and_push.Dockerfile
index e27599454..f5378743b 100644
--- a/build_and_push.Dockerfile
+++ b/docker/build_and_push.Dockerfile
@@ -78,9 +78,10 @@ RUN $POETRY_HOME/bin/poetry build
# Copy virtual environment and built .tar.gz from builder base
RUN useradd -m -u 1000 user
+RUN chown -R user:user /app
+USER user
# Install the package from the .tar.gz
-RUN python -m pip install /app/dist/*.tar.gz --user
-
+RUN python -m pip install /app/dist/*.tar.gz
ENTRYPOINT ["python", "-m", "langflow", "run"]
-CMD ["--host", "0.0.0.0", "--port", "7860"]
\ No newline at end of file
+CMD ["--host", "0.0.0.0", "--port", "7860"]
diff --git a/build_and_push_base.Dockerfile b/docker/build_and_push_base.Dockerfile
similarity index 92%
rename from build_and_push_base.Dockerfile
rename to docker/build_and_push_base.Dockerfile
index cbcdbceee..bef6cc3ba 100644
--- a/build_and_push_base.Dockerfile
+++ b/docker/build_and_push_base.Dockerfile
@@ -78,13 +78,15 @@ RUN cd src/frontend && npm run build
COPY src/backend ./src/backend
RUN cp -r src/frontend/build src/backend/base/langflow/frontend
RUN rm -rf src/backend/base/dist
-RUN cd src/backend/base && $POETRY_HOME/bin/poetry build --format sdist
+RUN cd src/backend/base && $POETRY_HOME/bin/poetry build
# Copy virtual environment and built .tar.gz from builder base
RUN useradd -m -u 1000 user
+RUN chown -R user:user /app
+USER user
# Install the package from the .tar.gz
-RUN python -m pip install /app/dist/*.tar.gz --user
+RUN python -m pip install /app/src/backend/base/dist/*.tar.gz
ENTRYPOINT ["python", "-m", "langflow", "run"]
-CMD ["--host", "0.0.0.0", "--port", "7860"]
\ No newline at end of file
+CMD ["--host", "0.0.0.0", "--port", "7860"]
diff --git a/docker-compose.yml b/docker/cdk-docker-compose.yml
similarity index 100%
rename from docker-compose.yml
rename to docker/cdk-docker-compose.yml
diff --git a/cdk.Dockerfile b/docker/cdk.Dockerfile
similarity index 100%
rename from cdk.Dockerfile
rename to docker/cdk.Dockerfile
diff --git a/container-cmd-cdk.sh b/docker/container-cmd-cdk.sh
similarity index 100%
rename from container-cmd-cdk.sh
rename to docker/container-cmd-cdk.sh
diff --git a/dev.Dockerfile b/docker/dev.Dockerfile
similarity index 100%
rename from dev.Dockerfile
rename to docker/dev.Dockerfile
diff --git a/Dockerfile b/docker/render.Dockerfile
similarity index 100%
rename from Dockerfile
rename to docker/render.Dockerfile
diff --git a/docker_example/pre.docker-compose.yml b/docker_example/pre.docker-compose.yml
index b2fa00903..229221fff 100644
--- a/docker_example/pre.docker-compose.yml
+++ b/docker_example/pre.docker-compose.yml
@@ -10,9 +10,9 @@ services:
environment:
- LANGFLOW_DATABASE_URL=postgresql://langflow:langflow@postgres:5432/langflow
# This variable defines where the logs, file storage, monitor data and secret keys are stored.
-      - LANGFLOW_CONFIG_DIR=/var/lib/langflow
+      - LANGFLOW_CONFIG_DIR=/app/langflow
     volumes:
-      - langflow-data:/var/lib/langflow
+      - langflow-data:/app/langflow
postgres:
image: postgres:16
diff --git a/docs/docs/administration/api.mdx b/docs/docs/administration/api.mdx
index 25dbeb31e..ae7530f30 100644
--- a/docs/docs/administration/api.mdx
+++ b/docs/docs/administration/api.mdx
@@ -4,15 +4,13 @@ import Admonition from "@theme/Admonition";
# API Keys
-## Introduction
-
-Langflow offers an API Key functionality that allows users to access their individual components and flows without going through traditional login authentication. The API Key is a user-specific token that can be included in the request's header or query parameter to authenticate API calls. The following documentation outlines how to generate, use, and manage these API Keys in Langflow.
+Langflow provides an API key functionality that allows users to access their individual components and flows without traditional login authentication. The API key is a user-specific token that can be included in the request header or query parameter to authenticate API calls. This documentation outlines how to generate, use, and manage API keys in Langflow.
- This feature requires the `LANGFLOW_AUTO_LOGIN` environment variable to be set
- to `False`. The default user and password are set using _`LANGFLOW_SUPERUSER`_
- and _`LANGFLOW_SUPERUSER_PASSWORD`_ environment variables. Default values are
- _`langflow`_ and _`langflow`_ respectively.
+ This feature requires the LANGFLOW_AUTO_LOGIN environment variable to be set
+ to False. The default user and password are set using the LANGFLOW_SUPERUSER
+ and LANGFLOW_SUPERUSER_PASSWORD environment variables. The default values are
+ langflow and langflow, respectively.
## Generating an API Key
@@ -93,7 +91,7 @@ print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key))
### Using the Query Parameter
-Alternatively, you can include the API key as a query parameter in the URL:
+Include the API key as a query parameter in the URL:
```bash
curl -X POST \
@@ -146,9 +144,9 @@ print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key))
## Security Considerations
-- **Visibility**: The API key won't be retrievable again through the UI for security reasons.
-- **Scope**: The key only allows access to the flows and components of the specific user to whom it was issued.
+- **Visibility**: For security reasons, the API key cannot be retrieved again through the UI.
+- **Scope**: The key allows access only to the flows and components of the specific user to whom it was issued.
## Revoking an API Key
-To revoke an API key, simply delete it from the UI. This will immediately invalidate the key and prevent it from being used again.
+To revoke an API key, delete it from the UI. This action immediately invalidates the key and prevents it from being used again.
diff --git a/docs/docs/administration/cli.mdx b/docs/docs/administration/cli.mdx
index 4f7c92983..8048eff45 100644
--- a/docs/docs/administration/cli.mdx
+++ b/docs/docs/administration/cli.mdx
@@ -1,4 +1,4 @@
-# 🖥️ Command Line Interface (CLI)
+# Command Line Interface (CLI)
## Overview
@@ -19,7 +19,6 @@ Each option is detailed below:
- `--workers`: Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`.
- `--timeout`: Sets the worker timeout in seconds. The default is `60`.
- `--port`: Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`.
-- `--config`: Defines the path to the configuration file. The default is `config.yaml`.
- `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`.
- `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`.
- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`.
diff --git a/docs/docs/administration/custom-component.mdx b/docs/docs/administration/custom-component.mdx
index 6decb3833..e82c56851 100644
--- a/docs/docs/administration/custom-component.mdx
+++ b/docs/docs/administration/custom-component.mdx
@@ -391,13 +391,13 @@ The recommended way to load custom components is to set the _`LANGFLOW_COMPONENT
```bash
export LANGFLOW_COMPONENTS_PATH='["/path/to/components"]'
-langflow
+langflow run
```
Alternatively, you can specify the path to your custom components using the _`--components-path`_ argument when running the Langflow CLI, as shown below:
```bash
-langflow --components-path /path/to/components
+langflow run --components-path /path/to/components
```
Langflow will attempt to load all of the components found in the specified directory. If a component fails to load due to errors in the component's code, Langflow will print an error message to the console but will continue loading the rest of the components.
diff --git a/docs/docs/administration/global-env.mdx b/docs/docs/administration/global-env.mdx
index fca7ec16a..249981409 100644
--- a/docs/docs/administration/global-env.mdx
+++ b/docs/docs/administration/global-env.mdx
@@ -1,5 +1,6 @@
import ZoomableImage from "/src/theme/ZoomableImage.js";
import Admonition from "@theme/Admonition";
+import ReactPlayer from "react-player";
# Global environment variables
@@ -43,3 +44,11 @@ You now have a `openai_api_key` global environment variable for your Langflow pr
4. To view and manage your project's global environment variables, visit **Settings** > **Variables and Secrets**.
For more on variables in HuggingFace Spaces, see [Managing Secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets).
+
+## Video
+
+
+
+
diff --git a/docs/docs/administration/langfuse_integration.mdx b/docs/docs/administration/langfuse_integration.mdx
deleted file mode 100644
index 81f06e787..000000000
--- a/docs/docs/administration/langfuse_integration.mdx
+++ /dev/null
@@ -1,49 +0,0 @@
-# Integrating Langfuse with Langflow
-
-## Introduction
-
-Langfuse is an open-source tracing and analytics tool designed for LLM applications. Integrating Langfuse with Langflow provides detailed production traces and granular insights into quality, cost, and latency. This integration allows you to monitor and debug your Langflow's chat or APIs easily.
-
-## Step-by-Step Instructions
-
-### Step 1: Create a Langfuse account
-
-1. Go to [Langfuse](https://langfuse.com) and click on the "Sign In" button in the top right corner.
-2. Click on the "Sign Up" button and create an account.
-3. Once logged in, click on "Settings" and then on "Create new API keys."
-4. Copy the Public key and the Secret Key and save them somewhere safe.
- {/* Add these keys to your environment variables in the following step. */}
-
-### Step 2: Set up Langfuse in Langflow
-
-1. **Export the Environment Variables**: You'll need to export the environment variables `LANGFLOW_LANGFUSE_SECRET_KEY` and `LANGFLOW_LANGFUSE_PUBLIC_KEY` with the values obtained in Step 1.
-
- You can do this by executing the following commands in your terminal:
-
- ```bash
- export LANGFLOW_LANGFUSE_SECRET_KEY=
- export LANGFLOW_LANGFUSE_PUBLIC_KEY=
- ```
-
- Alternatively, you can run the Langflow CLI command:
-
- ```bash
- LANGFLOW_LANGFUSE_SECRET_KEY= LANGFLOW_LANGFUSE_PUBLIC_KEY= langflow
- ```
-
- If you are self-hosting Langfuse, you can also set the environment variable `LANGFLOW_LANGFUSE_HOST` to point to your Langfuse instance. By default, Langfuse points to the cloud instance at `https://cloud.langfuse.com`.
-
-2. **Verify Integration**: Ensure that the environment variables are set correctly by checking their existence in your environment, for example by running:
-
- ```bash
- echo $LANGFLOW_LANGFUSE_SECRET_KEY
- echo $LANGFLOW_LANGFUSE_PUBLIC_KEY
- ```
-
-3. **Monitor Langflow**: Now, whenever you use Langflow's chat or API, you will be able to see the tracing of your conversations in Langfuse.
-
-That's it! You have successfully integrated Langfuse with Langflow, enhancing observability and debugging capabilities for your LLM application.
-
----
-
-Note: For more details or customized configurations, please refer to the official [Langfuse documentation](https://langfuse.com/docs/integrations/langchain).
diff --git a/docs/docs/administration/playground.mdx b/docs/docs/administration/playground.mdx
index 57f3f2de0..08166729e 100644
--- a/docs/docs/administration/playground.mdx
+++ b/docs/docs/administration/playground.mdx
@@ -14,7 +14,7 @@ It even works for flows hosted on the Langflow store!
As long as you have a flow's environment variables set, you can run it by clicking the **Playground** button.
-1. From your **Collections** page, click **Playground** in one of your flows.
+1. From your **Collections** page, click the **Playground** button in one of your flows.
The **Playground** window opens.
2. Chat with your bot as you normally would, all without having to open the editor.
+
+## Video
+
+
+
+
diff --git a/docs/docs/getting-started/canvas.mdx b/docs/docs/getting-started/canvas.mdx
new file mode 100644
index 000000000..0e1a6626c
--- /dev/null
+++ b/docs/docs/getting-started/canvas.mdx
@@ -0,0 +1,217 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+import Admonition from "@theme/Admonition";
+
+# 🎨 Langflow canvas
+
+The **Langflow canvas** is the central hub of Langflow, where you'll assemble new flows from components, run them, and see the results.
+
+To get a feel for the canvas, we'll examine a basic prompting flow.
+You can either build this flow yourself, or select **New Project** > **Basic prompting** to open a canvas with the flow pre-built.
+
+
+
+For more on the difference between flows, components, collections, and projects, see [Flows, collections, components, and projects](./flows-components-collections.mdx).
+
+## Components
+
+A component is a building block of a flow.
+
+
+ During the flow creation process, you will notice handles (colored circles)
+ attached to one or both sides of a component. These handles represent the
+ availability to connect to other components. Hover over a handle to see
+ connection details.
+
+
+
+ For example, if you select a ConversationChain component, you
+ will see orange o and purple{" "}
+ o input handles. They indicate that
+ this component accepts an LLM and a Memory component as inputs. The red
+ asterisk * means that at least one input
+ of that type is required.
+
+
+{" "}
+
+
+
+
+In the top right corner of the component, you'll find the component status icon ().
+Run the flow by clicking the **Playground** button at the bottom right of the canvas.
+
+Once the validation is complete, the status of each validated component should turn green ().
+To debug, hover over the component status to see the outputs.
+
+
+
+---
+
+### Component menu
+
+Each component is a little unique, but they will all have a menu bar on top that looks something like this.
+The menu options are **Code**, **Save**, **Duplicate**, and **More**.
+
+
+
+#### Code
+
+The **Code** button displays your component's Python code.
+You can modify the code and save it.
+
+#### Save
+
+Save your component to the **Saved** components folder for re-use.
+
+#### Duplicate
+
+Duplicate your component in the canvas.
+
+#### More
+
+**Advanced** - modify the parameters of your component.
+
+
+
+
+
+**Copy** - copy your component.
+
+**Share** - share your component to the Langflow store.
+
+**Docs** - view documentation for your component.
+
+**Delete** - delete your component.
+
+### Group multiple components
+
+Components without input or output nodes can be grouped into a single component for re-use.
+This is useful for combining large flows into single components (like RAG with a vector database, for example) and saves space in the canvas.
+
+1. Hold **Shift** and drag to select the **Prompt** and **OpenAI** components.
+2. Select **Group**.
+3. The components merge into a single component.
+4. To save the new component, select **Save**. It can now be re-used from the **Saved** components folder.
+
+## Playground
+
+Run your flow by clicking the **Playground** button.
+
+For more, see [Playground](../administration/playground.mdx).
+
+## API
+
+The **API** button opens the API window, where Langflow presents code for integrating your flow into external applications.
+
+Modify the call's parameters in the **Tweaks** window, click the **Copy Code** or **Download** buttons, and paste your code where you want to use it.
+
+
+
+### curl
+
+The **curl** tab displays sample code for posting a query to your flow.
+Modify the `input_value` to change your input message.
+
+```curl
+curl -X POST \
+ http://127.0.0.1:7863/api/v1/run/f2eefd80-bb91-4190-9279-0d6ffafeaac4\?stream\=false \
+ -H 'Content-Type: application/json'\
+ -d '{"input_value": "is anybody there?",
+ "output_type": "chat",
+ "input_type": "chat",
+ "tweaks": {
+ "Prompt-uxBqP": {},
+ "OpenAIModel-k39HS": {},
+ "ChatOutput-njtka": {},
+ "ChatInput-P3fgL": {}
+}}'
+```
+
+Result:
+
+```
+{"session_id":"f2eefd80-bb91-4190-9279-0d6ffafeaac4:53856a772b8e1cfcb3dd2e71576b5215399e95bae318d3c02101c81b7c252da3","outputs":[{"inputs":{"input_value":"is anybody there?"},"outputs":[{"results":{"result":"Arrr, me hearties! Aye, this be Captain [Your Name] speakin'. What be ye needin', matey?"},"artifacts":{"message":"Arrr, me hearties! Aye, this be Captain [Your Name] speakin'. What be ye needin', matey?","sender":"Machine","sender_name":"AI"},"messages":[{"message":"Arrr, me hearties! Aye, this be Captain [Your Name] speakin'. What be ye needin', matey?","sender":"Machine","sender_name":"AI","component_id":"ChatOutput-njtka"}],"component_display_name":"Chat Output","component_id":"ChatOutput-njtka"}]}]}%
+```
+
+### Python API
+
+The **Python API** tab displays code to interact with your flow using the Python HTTP requests library.
+
+### Python Code
+
+The **Python Code** tab displays code to interact with your flow's `.json` file using the Langflow runtime.
+
+### Chat Widget HTML
+
+The **Chat Widget HTML** tab displays code that can be inserted in the `` of your HTML to interact with your flow.
+For more, see the [Chat widget documentation](../administration/chat-widget.mdx).
+
+### Tweaks
+
+The **Tweaks** tab displays the available parameters for your flow.
+Modifying the parameters changes the code parameters across all windows.
+For example, changing the **Chat Input** component's `input_value` will change that value across all API calls.
+
+
+
+
+
+## Project options menu
+
+To see options for your project, in the upper left corner of the canvas, select the dropdown menu.
+
+
+
+**New** - Start a new project.
+
+**Duplicate** - Duplicate the current flow as a new project.
+
+**Settings** - Modify the project's **Name** or **Description**.
+
+**Import** - Upload a flow `.json` file from your local machine.
+
+**Export** - Download your current project to your local machine as a `.json` file.
+
+**Undo** or **Redo** - Undo or redo your last action.
diff --git a/docs/docs/getting-started/flows-components-collections.mdx b/docs/docs/getting-started/flows-components-collections.mdx
new file mode 100644
index 000000000..a1e7f95a8
--- /dev/null
+++ b/docs/docs/getting-started/flows-components-collections.mdx
@@ -0,0 +1,115 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# 🖥️ Flows, components, collections, and projects
+
+## TL;DR
+
+A [flow](#flow) is a pipeline of components connected together in the Langflow canvas.
+
+A [component](#component) is a single building block within a flow. A component has inputs, outputs, and parameters that define its functionality.
+
+A [collection](#collection) is a snapshot of the flows available in your database. Collections can be downloaded to local storage and uploaded for future use.
+
+A [project](#project) can be a component or a flow. Projects are saved as part of your collection.
+
+For example, the **OpenAI LLM** is a **component** of the **Basic prompting** flow, and the **flow** is stored in a **collection**.
+
+## Flow
+
+A **flow** is a pipeline of components connected together in the Langflow canvas.
+
+For example, the [Basic prompting](../starter-projects/basic-prompting.mdx) flow is a pipeline of four components:
+
+
+
+In this flow, the **OpenAI LLM component** receives input (left side) and produces output (right side) - in this case, receiving input from the **Chat Input** and **Prompt** components and producing output to the **Chat Output** component.
+
+## Component
+
+Components are the building blocks of flows. They consist of inputs, outputs, and parameters that define their functionality. These elements provide a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work in the LangChain [documentation](https://python.langchain.com/docs/integrations/components).
+
+
+ During the flow creation process, you will notice handles (colored circles)
+ attached to one or both sides of a component. These handles represent the
+ availability to connect to other components. Hover over a handle to see
+ connection details.
+
+
+
+  For example, if you select a ConversationChain component, you
+  will see orange and purple input handles. They indicate that
+  this component accepts an LLM and a Memory component as inputs. The red
+  asterisk (*) means that at least one input
+  of that type is required.
+
+
+{" "}
+
+
+
+
+In the top right corner of the component, you'll find the component status icon.
+Build the flow by clicking the **Playground** at the bottom right of the canvas.
+
+Once the validation is complete, the status of each validated component should turn green.
+To debug, hover over the component status to see the outputs.
+
+
+
+---
+
+### Component Parameters
+
+Langflow components can be edited by clicking the component settings button. Hide parameters to reduce complexity and keep the canvas clean and intuitive for experimentation.
+
+
+
+
+
+## Collection
+
+A collection is a snapshot of flows available in a database.
+
+Collections can be downloaded to local storage and uploaded for future use.
+
+
+
+
+
+## Project
+
+A **Project** can be a flow or a component. To view your saved projects, select **My Collection**.
+
+Your **Projects** are displayed.
+
+Click the **Playground** button to run a flow from the **My Collection** screen.
+
+In the top left corner of the screen are options for **Download Collection**, **Upload Collection**, and **New Project**.
+
+Select **Download Collection** to save your project to your local machine. This downloads all flows and components as a `.json` file.
+
+Select **Upload Collection** to upload a flow or component `.json` file from your local machine.
+
+Select **New Project** to create a new project. In addition to a blank canvas, [starter projects](../starter-projects/basic-prompting.mdx) are also available.
diff --git a/docs/docs/migration/possible-installation-issues.mdx b/docs/docs/migration/possible-installation-issues.mdx
index 00b6f2cd2..a012a1c09 100644
--- a/docs/docs/migration/possible-installation-issues.mdx
+++ b/docs/docs/migration/possible-installation-issues.mdx
@@ -1,4 +1,4 @@
-# Common Installation Issues
+# ❗️ Common Installation Issues
This is a list of possible issues that you may encounter when installing Langflow 1.0 alpha and how to solve them.
@@ -25,11 +25,11 @@ ModuleNotFoundError: No module named 'langflow.__main__'
There are two possible reasons for this error:
1. You've installed Langflow using _`pip install langflow`_ but you already had a previous version of Langflow installed in your system.
- In this case, you might be running the wrong executable.
- To solve this issue, run the correct executable by running _`python -m langflow run`_ instead of _`langflow run`_.
- If that doesn't work, try uninstalling and reinstalling Langflow with _`python -m pip install langflow --pre -U`_.
+ In this case, you might be running the wrong executable.
+ To solve this issue, run the correct executable by running _`python -m langflow run`_ instead of _`langflow run`_.
+ If that doesn't work, try uninstalling and reinstalling Langflow with _`python -m pip install langflow --pre -U`_.
2. Some version conflicts might have occurred during the installation process.
- Run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall Langflow and its dependencies.
+ Run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall Langflow and its dependencies.
## _`Something went wrong running migrations. Please, run 'langflow migration --fix'`_
@@ -45,4 +45,3 @@ There are two possible reasons for this error:
This error can occur during Langflow upgrades when the new version can't override `langflow-pre.db` in `.cache/langflow/`. Clearing the cache removes this file but will also erase your settings.
If you wish to retain your files, back them up before clearing the folder.
-
diff --git a/docs/sidebars.js b/docs/sidebars.js
index 81b255a66..7a5d5bdc2 100644
--- a/docs/sidebars.js
+++ b/docs/sidebars.js
@@ -15,6 +15,9 @@ module.exports = {
"getting-started/install-langflow",
"getting-started/quickstart",
"getting-started/huggingface-spaces",
+ "getting-started/canvas",
+ "getting-started/flows-components-collections",
+ "migration/possible-installation-issues",
"getting-started/new-to-llms",
],
},
@@ -35,15 +38,12 @@ module.exports = {
label: "Administration",
collapsed: false,
items: [
- "administration/login",
"administration/api",
+ "administration/login",
"administration/cli",
"administration/playground",
"administration/global-env",
- "administration/components",
- "administration/collection",
- "administration/prompt-customization",
- "administration/langfuse_integration",
+ "administration/chat-widget",
],
},
{
diff --git a/docs/static/data/AstraDB-RAG-Flows.json b/docs/static/data/AstraDB-RAG-Flows.json
index 3bc1c8634..d38364b4a 100644
--- a/docs/static/data/AstraDB-RAG-Flows.json
+++ b/docs/static/data/AstraDB-RAG-Flows.json
@@ -1,3403 +1,3147 @@
{
- "id": "51e2b78a-199b-4054-9f32-e288eef6924c",
- "data": {
- "nodes": [
- {
- "id": "ChatInput-yxMKE",
- "type": "genericNode",
- "position": {
- "x": 1195.5276981160775,
- "y": 209.421875
- },
- "data": {
- "type": "ChatInput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "value": "what is a line"
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Get chat inputs from the Playground.",
- "icon": "ChatInput",
- "base_classes": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "display_name": "Chat Input",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatInput-yxMKE"
- },
- "selected": false,
- "width": 384,
- "height": 383
+ "id": "51e2b78a-199b-4054-9f32-e288eef6924c",
+ "data": {
+ "nodes": [
+ {
+ "id": "ChatInput-yxMKE",
+ "type": "genericNode",
+ "position": {
+ "x": 1195.5276981160775,
+ "y": 209.421875
+ },
+ "data": {
+ "type": "ChatInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "value": "what is a line"
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "TextOutput-BDknO",
- "type": "genericNode",
- "position": {
- "x": 2322.600672827879,
- "y": 604.9467307442569
- },
- "data": {
- "type": "TextOutput",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Value",
- "advanced": false,
- "input_types": [
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "Text or Record to be passed as output.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a text output in the Playground.",
- "icon": "type",
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "display_name": "Extracted Chunks",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "TextOutput-BDknO"
- },
- "selected": false,
- "width": 384,
- "height": 289,
- "positionAbsolute": {
- "x": 2322.600672827879,
- "y": 604.9467307442569
- },
- "dragging": false
+ "description": "Get chat inputs from the Playground.",
+ "icon": "ChatInput",
+ "base_classes": ["Text", "str", "object", "Record"],
+ "display_name": "Chat Input",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
},
- {
- "id": "OpenAIEmbeddings-ZlOk1",
- "type": "genericNode",
- "position": {
- "x": 1183.667250865064,
- "y": 687.3171828430261
- },
- "data": {
- "type": "OpenAIEmbeddings",
- "node": {
- "template": {
- "allowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "allowed_special",
- "display_name": "Allowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "client": {
- "type": "Any",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "client",
- "display_name": "Client",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_headers": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_headers",
- "display_name": "Default Headers",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_query": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_query",
- "display_name": "Default Query",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "deployment": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "deployment",
- "display_name": "Deployment",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "disallowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [
- "all"
- ],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "disallowed_special",
- "display_name": "Disallowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "embedding_ctx_length": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 8191,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding_ctx_length",
- "display_name": "Embedding Context Length",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_retries": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 6,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_retries",
- "display_name": "Max Retries",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "text-embedding-3-small",
- "text-embedding-3-large",
- "text-embedding-ada-002"
- ],
- "name": "model",
- "display_name": "Model",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "openai_api_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_type",
- "display_name": "OpenAI API Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_version": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_version",
- "display_name": "OpenAI API Version",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_organization": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_organization",
- "display_name": "OpenAI Organization",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_proxy": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_proxy",
- "display_name": "OpenAI Proxy",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "request_timeout": {
- "type": "float",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "request_timeout",
- "display_name": "Request Timeout",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "show_progress_bar": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "show_progress_bar",
- "display_name": "Show Progress Bar",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "skip_empty": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "skip_empty",
- "display_name": "Skip Empty",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_enable": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_enable",
- "display_name": "TikToken Enable",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_model_name",
- "display_name": "TikToken Model Name",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Generate embeddings using OpenAI models.",
- "base_classes": [
- "Embeddings"
- ],
- "display_name": "OpenAI Embeddings",
- "documentation": "",
- "custom_fields": {
- "openai_api_key": null,
- "default_headers": null,
- "default_query": null,
- "allowed_special": null,
- "disallowed_special": null,
- "chunk_size": null,
- "client": null,
- "deployment": null,
- "embedding_ctx_length": null,
- "max_retries": null,
- "model": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "openai_api_type": null,
- "openai_api_version": null,
- "openai_organization": null,
- "openai_proxy": null,
- "request_timeout": null,
- "show_progress_bar": null,
- "skip_empty": null,
- "tiktoken_enable": null,
- "tiktoken_model_name": null
- },
- "output_types": [
- "Embeddings"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "OpenAIEmbeddings-ZlOk1"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "dragging": false
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatInput-yxMKE"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383
+ },
+ {
+ "id": "TextOutput-BDknO",
+ "type": "genericNode",
+ "position": {
+ "x": 2322.600672827879,
+ "y": 604.9467307442569
+ },
+ "data": {
+ "type": "TextOutput",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": ["Record", "Text"],
+ "dynamic": false,
+ "info": "Text or Record to be passed as output.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "OpenAIModel-EjXlN",
- "type": "genericNode",
- "position": {
- "x": 3410.117202077183,
- "y": 431.2038048137648
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 256,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-3.5-turbo",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-4-0125-preview",
- "gpt-4-1106-preview",
- "gpt-4-vision-preview",
- "gpt-3.5-turbo-0125",
- "gpt-3.5-turbo-1106"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": true,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 0.1,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-EjXlN"
- },
- "selected": true,
- "width": 384,
- "height": 563,
- "positionAbsolute": {
- "x": 3410.117202077183,
- "y": 431.2038048137648
- },
- "dragging": false
+ "description": "Display a text output in the Playground.",
+ "icon": "type",
+ "base_classes": ["object", "Text", "str"],
+ "display_name": "Extracted Chunks",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
},
- {
- "id": "Prompt-xeI6K",
- "type": "genericNode",
- "position": {
- "x": 2969.0261961391298,
- "y": 442.1613649809069
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextOutput-BDknO"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 289,
+ "positionAbsolute": {
+ "x": 2322.600672827879,
+ "y": 604.9467307442569
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIEmbeddings-ZlOk1",
+ "type": "genericNode",
+ "position": {
+ "x": 1183.667250865064,
+ "y": 687.3171828430261
+ },
+ "data": {
+ "type": "OpenAIEmbeddings",
+ "node": {
+ "template": {
+ "allowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "allowed_special",
+ "display_name": "Allowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "client": {
+ "type": "Any",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "client",
+ "display_name": "Client",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_headers": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_headers",
+ "display_name": "Default Headers",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_query": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_query",
+ "display_name": "Default Query",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "deployment": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "deployment",
+ "display_name": "Deployment",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "disallowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": ["all"],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "disallowed_special",
+ "display_name": "Disallowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "embedding_ctx_length": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 8191,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding_ctx_length",
+ "display_name": "Embedding Context Length",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_retries": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 6,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_retries",
+ "display_name": "Max Retries",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "text-embedding-3-small",
+ "text-embedding-3-large",
+ "text-embedding-ada-002"
+ ],
+ "name": "model",
+ "display_name": "Model",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": ""
+ },
+ "openai_api_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_type",
+ "display_name": "OpenAI API Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_version": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_version",
+ "display_name": "OpenAI API Version",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_organization": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_organization",
+ "display_name": "OpenAI Organization",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_proxy": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_proxy",
+ "display_name": "OpenAI Proxy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "request_timeout": {
+ "type": "float",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "request_timeout",
+ "display_name": "Request Timeout",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
},
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "context": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "context",
- "display_name": "context",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- },
- "question": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "question",
- "display_name": "question",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "context",
- "question"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-xeI6K",
- "description": "Create a prompt template with dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": false,
- "width": 384,
- "height": 477,
- "positionAbsolute": {
- "x": 2969.0261961391298,
- "y": 442.1613649809069
- },
- "dragging": false
+ "load_from_db": false,
+ "title_case": false
+ },
+ "show_progress_bar": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "show_progress_bar",
+ "display_name": "Show Progress Bar",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "skip_empty": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "skip_empty",
+ "display_name": "Skip Empty",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_enable": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_enable",
+ "display_name": "TikToken Enable",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_model_name",
+ "display_name": "TikToken Model Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "ChatOutput-Q39I8",
- "type": "genericNode",
- "position": {
- "x": 3887.2073667611485,
- "y": 588.4801225794856
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "In case of Message being a Record, this template will be used to convert it to text.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "AI",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Playground.",
- "icon": "ChatOutput",
- "base_classes": [
- "object",
- "Text",
- "Record",
- "str"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null,
- "record_template": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-Q39I8"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "positionAbsolute": {
- "x": 3887.2073667611485,
- "y": 588.4801225794856
- },
- "dragging": false
+ "description": "Generate embeddings using OpenAI models.",
+ "base_classes": ["Embeddings"],
+ "display_name": "OpenAI Embeddings",
+ "documentation": "",
+ "custom_fields": {
+ "openai_api_key": null,
+ "default_headers": null,
+ "default_query": null,
+ "allowed_special": null,
+ "disallowed_special": null,
+ "chunk_size": null,
+ "client": null,
+ "deployment": null,
+ "embedding_ctx_length": null,
+ "max_retries": null,
+ "model": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "openai_api_type": null,
+ "openai_api_version": null,
+ "openai_organization": null,
+ "openai_proxy": null,
+ "request_timeout": null,
+ "show_progress_bar": null,
+ "skip_empty": null,
+ "tiktoken_enable": null,
+ "tiktoken_model_name": null
},
- {
- "id": "File-t0a6a",
- "type": "genericNode",
- "position": {
- "x": 2257.233450682836,
- "y": 1747.5389618367233
+ "output_types": ["Embeddings"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "OpenAIEmbeddings-ZlOk1"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "dragging": false
+ },
+ {
+ "id": "OpenAIModel-EjXlN",
+ "type": "genericNode",
+ "position": {
+ "x": 3410.117202077183,
+ "y": 431.2038048137648
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-3.5-turbo",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-4-0125-preview",
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
+ "gpt-3.5-turbo-0125",
+ "gpt-3.5-turbo-1106"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 0.1,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
},
- "data": {
- "type": "File",
- "node": {
- "template": {
- "path": {
- "type": "file",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [
- ".txt",
- ".md",
- ".mdx",
- ".csv",
- ".json",
- ".yaml",
- ".yml",
- ".xml",
- ".html",
- ".htm",
- ".pdf",
- ".docx"
- ],
- "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf",
- "password": false,
- "name": "path",
- "display_name": "Path",
- "advanced": false,
- "dynamic": false,
- "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx",
- "load_from_db": false,
- "title_case": false,
- "value": ""
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "silent_errors": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "silent_errors",
- "display_name": "Silent Errors",
- "advanced": true,
- "dynamic": false,
- "info": "If true, errors will not raise an exception.",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "A generic file loader.",
- "icon": "file-text",
- "base_classes": [
- "Record"
- ],
- "display_name": "File",
- "documentation": "",
- "custom_fields": {
- "path": null,
- "silent_errors": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "File-t0a6a"
- },
- "selected": false,
- "width": 384,
- "height": 281,
- "positionAbsolute": {
- "x": 2257.233450682836,
- "y": 1747.5389618367233
- },
- "dragging": false
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "RecursiveCharacterTextSplitter-tR9QM",
- "type": "genericNode",
- "position": {
- "x": 2791.013514133929,
- "y": 1462.9588953494142
- },
- "data": {
- "type": "RecursiveCharacterTextSplitter",
- "node": {
- "template": {
- "inputs": {
- "type": "Document",
- "required": true,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "inputs",
- "display_name": "Input",
- "advanced": false,
- "input_types": [
- "Document",
- "Record"
- ],
- "dynamic": false,
- "info": "The texts to split.",
- "load_from_db": false,
- "title_case": false
- },
- "chunk_overlap": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 200,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_overlap",
- "display_name": "Chunk Overlap",
- "advanced": false,
- "dynamic": false,
- "info": "The amount of overlap between chunks.",
- "load_from_db": false,
- "title_case": false
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": false,
- "dynamic": false,
- "info": "The maximum length of each chunk.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = 
None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "separators": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "separators",
- "display_name": "Separators",
- "advanced": false,
- "dynamic": false,
- "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": [
- ""
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Split text into chunks of a specified length.",
- "base_classes": [
- "Record"
- ],
- "display_name": "Recursive Character Text Splitter",
- "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter",
- "custom_fields": {
- "inputs": null,
- "separators": null,
- "chunk_size": null,
- "chunk_overlap": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "RecursiveCharacterTextSplitter-tR9QM"
- },
- "selected": false,
- "width": 384,
- "height": 501,
- "positionAbsolute": {
- "x": 2791.013514133929,
- "y": 1462.9588953494142
- },
- "dragging": false
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": ["object", "Text", "str"],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
},
- {
- "id": "AstraDBSearch-41nRz",
- "type": "genericNode",
- "position": {
- "x": 1723.976434815103,
- "y": 277.03317407245913
- },
- "data": {
- "type": "AstraDBSearch",
- "node": {
- "template": {
- "embedding": {
- "type": "Embeddings",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding",
- "display_name": "Embedding",
- "advanced": false,
- "dynamic": false,
- "info": "Embedding to use",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input Value",
- "advanced": false,
- "dynamic": false,
- "info": "Input value to search",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "api_endpoint": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "api_endpoint",
- "display_name": "API Endpoint",
- "advanced": false,
- "dynamic": false,
- "info": "API endpoint URL for the Astra DB service.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "batch_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "batch_size",
- "display_name": "Batch Size",
- "advanced": true,
- "dynamic": false,
- "info": "Optional number of records to process in a single batch.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_delete_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_delete_concurrency",
- "display_name": "Bulk Delete Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk delete operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_batch_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_batch_concurrency",
- "display_name": "Bulk Insert Batch Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_overwrite_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_overwrite_concurrency",
- "display_name": "Bulk Insert Overwrite Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": 
True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: 
Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "collection_indexing_policy": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_indexing_policy",
- "display_name": "Collection Indexing Policy",
- "advanced": true,
- "dynamic": false,
- "info": "Optional dictionary defining the indexing policy for the collection.",
- "load_from_db": false,
- "title_case": false
- },
- "collection_name": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_name",
- "display_name": "Collection Name",
- "advanced": false,
- "dynamic": false,
- "info": "The name of the collection within Astra DB where the vectors will be stored.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": "langflow"
- },
- "metadata_indexing_exclude": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_exclude",
- "display_name": "Metadata Indexing Exclude",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to exclude from the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metadata_indexing_include": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_include",
- "display_name": "Metadata Indexing Include",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to include in the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metric": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metric",
- "display_name": "Metric",
- "advanced": true,
- "dynamic": false,
- "info": "Optional distance metric for vector comparisons in the vector store.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "namespace": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "namespace",
- "display_name": "Namespace",
- "advanced": true,
- "dynamic": false,
- "info": "Optional namespace within Astra DB to use for the collection.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "number_of_results": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 4,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "number_of_results",
- "display_name": "Number of Results",
- "advanced": true,
- "dynamic": false,
- "info": "Number of results to return.",
- "load_from_db": false,
- "title_case": false
- },
- "pre_delete_collection": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "pre_delete_collection",
- "display_name": "Pre Delete Collection",
- "advanced": true,
- "dynamic": false,
- "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
- "load_from_db": false,
- "title_case": false
- },
- "search_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Similarity",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Similarity",
- "MMR"
- ],
- "name": "search_type",
- "display_name": "Search Type",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "setup_mode": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Sync",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Sync",
- "Async",
- "Off"
- ],
- "name": "setup_mode",
- "display_name": "Setup Mode",
- "advanced": true,
- "dynamic": false,
- "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "token": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "token",
- "display_name": "Token",
- "advanced": false,
- "dynamic": false,
- "info": "Authentication token for accessing Astra DB.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "_type": "CustomComponent"
- },
- "description": "Searches an existing Astra DB Vector Store.",
- "icon": "AstraDB",
- "base_classes": [
- "Record"
- ],
- "display_name": "Astra DB Search",
- "documentation": "",
- "custom_fields": {
- "embedding": null,
- "collection_name": null,
- "input_value": null,
- "token": null,
- "api_endpoint": null,
- "search_type": null,
- "number_of_results": null,
- "namespace": null,
- "metric": null,
- "batch_size": null,
- "bulk_insert_batch_concurrency": null,
- "bulk_insert_overwrite_concurrency": null,
- "bulk_delete_concurrency": null,
- "setup_mode": null,
- "pre_delete_collection": null,
- "metadata_indexing_include": null,
- "metadata_indexing_exclude": null,
- "collection_indexing_policy": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "token",
- "api_endpoint",
- "collection_name",
- "input_value",
- "embedding"
- ],
- "beta": false
- },
- "id": "AstraDBSearch-41nRz"
- },
- "selected": false,
- "width": 384,
- "height": 713,
- "dragging": false,
- "positionAbsolute": {
- "x": 1723.976434815103,
- "y": 277.03317407245913
- }
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-EjXlN"
+ },
+ "selected": true,
+ "width": 384,
+ "height": 563,
+ "positionAbsolute": {
+ "x": 3410.117202077183,
+ "y": 431.2038048137648
+ },
+ "dragging": false
+ },
+ {
+ "id": "Prompt-xeI6K",
+ "type": "genericNode",
+ "position": {
+ "x": 2969.0261961391298,
+ "y": 442.1613649809069
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "context": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "context",
+ "display_name": "context",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ },
+ "question": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "question",
+ "display_name": "question",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
},
- {
- "id": "AstraDB-eUCSS",
- "type": "genericNode",
- "position": {
- "x": 3372.04958055989,
- "y": 1611.0742035495277
- },
- "data": {
- "type": "AstraDB",
- "node": {
- "template": {
- "embedding": {
- "type": "Embeddings",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding",
- "display_name": "Embedding",
- "advanced": false,
- "dynamic": false,
- "info": "Embedding to use",
- "load_from_db": false,
- "title_case": false
- },
- "inputs": {
- "type": "Record",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "inputs",
- "display_name": "Inputs",
- "advanced": false,
- "dynamic": false,
- "info": "Optional list of records to be processed and stored in the vector store.",
- "load_from_db": false,
- "title_case": false
- },
- "api_endpoint": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "api_endpoint",
- "display_name": "API Endpoint",
- "advanced": false,
- "dynamic": false,
- "info": "API endpoint URL for the Astra DB service.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "batch_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "batch_size",
- "display_name": "Batch Size",
- "advanced": true,
- "dynamic": false,
- "info": "Optional number of records to process in a single batch.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_delete_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_delete_concurrency",
- "display_name": "Bulk Delete Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk delete operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_batch_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_batch_concurrency",
- "display_name": "Bulk Insert Batch Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_overwrite_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_overwrite_concurrency",
- "display_name": "Bulk Insert Overwrite Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert 
Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n 
collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "collection_indexing_policy": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_indexing_policy",
- "display_name": "Collection Indexing Policy",
- "advanced": true,
- "dynamic": false,
- "info": "Optional dictionary defining the indexing policy for the collection.",
- "load_from_db": false,
- "title_case": false
- },
- "collection_name": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_name",
- "display_name": "Collection Name",
- "advanced": false,
- "dynamic": false,
- "info": "The name of the collection within Astra DB where the vectors will be stored.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": "langflow"
- },
- "metadata_indexing_exclude": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_exclude",
- "display_name": "Metadata Indexing Exclude",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to exclude from the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metadata_indexing_include": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_include",
- "display_name": "Metadata Indexing Include",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to include in the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metric": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metric",
- "display_name": "Metric",
- "advanced": true,
- "dynamic": false,
- "info": "Optional distance metric for vector comparisons in the vector store.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "namespace": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "namespace",
- "display_name": "Namespace",
- "advanced": true,
- "dynamic": false,
- "info": "Optional namespace within Astra DB to use for the collection.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "pre_delete_collection": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "pre_delete_collection",
- "display_name": "Pre Delete Collection",
- "advanced": true,
- "dynamic": false,
- "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
- "load_from_db": false,
- "title_case": false
- },
- "setup_mode": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Async",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Sync",
- "Async",
- "Off"
- ],
- "name": "setup_mode",
- "display_name": "Setup Mode",
- "advanced": true,
- "dynamic": false,
- "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "token": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "token",
- "display_name": "Token",
- "advanced": false,
- "dynamic": false,
- "info": "Authentication token for accessing Astra DB.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "_type": "CustomComponent"
- },
- "description": "Builds or loads an Astra DB Vector Store.",
- "icon": "AstraDB",
- "base_classes": [
- "VectorStore"
- ],
- "display_name": "Astra DB",
- "documentation": "",
- "custom_fields": {
- "embedding": null,
- "token": null,
- "api_endpoint": null,
- "collection_name": null,
- "inputs": null,
- "namespace": null,
- "metric": null,
- "batch_size": null,
- "bulk_insert_batch_concurrency": null,
- "bulk_insert_overwrite_concurrency": null,
- "bulk_delete_concurrency": null,
- "setup_mode": null,
- "pre_delete_collection": null,
- "metadata_indexing_include": null,
- "metadata_indexing_exclude": null,
- "collection_indexing_policy": null
- },
- "output_types": [
- "VectorStore"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "token",
- "api_endpoint",
- "collection_name",
- "inputs",
- "embedding"
- ],
- "beta": false
- },
- "id": "AstraDB-eUCSS"
- },
- "selected": false,
- "width": 384,
- "height": 573,
- "positionAbsolute": {
- "x": 3372.04958055989,
- "y": 1611.0742035495277
- },
- "dragging": false
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": ["object", "Text", "str"],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": ["context", "question"]
},
- {
- "id": "OpenAIEmbeddings-9TPjc",
- "type": "genericNode",
- "position": {
- "x": 2814.0402191223047,
- "y": 1955.9268168273086
- },
- "data": {
- "type": "OpenAIEmbeddings",
- "node": {
- "template": {
- "allowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "allowed_special",
- "display_name": "Allowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "client": {
- "type": "Any",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "client",
- "display_name": "Client",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_headers": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_headers",
- "display_name": "Default Headers",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_query": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_query",
- "display_name": "Default Query",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "deployment": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "deployment",
- "display_name": "Deployment",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "disallowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [
- "all"
- ],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "disallowed_special",
- "display_name": "Disallowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "embedding_ctx_length": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 8191,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding_ctx_length",
- "display_name": "Embedding Context Length",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_retries": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 6,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_retries",
- "display_name": "Max Retries",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "text-embedding-3-small",
- "text-embedding-3-large",
- "text-embedding-ada-002"
- ],
- "name": "model",
- "display_name": "Model",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "openai_api_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_type",
- "display_name": "OpenAI API Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_version": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_version",
- "display_name": "OpenAI API Version",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_organization": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_organization",
- "display_name": "OpenAI Organization",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_proxy": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_proxy",
- "display_name": "OpenAI Proxy",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "request_timeout": {
- "type": "float",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "request_timeout",
- "display_name": "Request Timeout",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "show_progress_bar": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "show_progress_bar",
- "display_name": "Show Progress Bar",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "skip_empty": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "skip_empty",
- "display_name": "Skip Empty",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_enable": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_enable",
- "display_name": "TikToken Enable",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_model_name",
- "display_name": "TikToken Model Name",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Generate embeddings using OpenAI models.",
- "base_classes": [
- "Embeddings"
- ],
- "display_name": "OpenAI Embeddings",
- "documentation": "",
- "custom_fields": {
- "openai_api_key": null,
- "default_headers": null,
- "default_query": null,
- "allowed_special": null,
- "disallowed_special": null,
- "chunk_size": null,
- "client": null,
- "deployment": null,
- "embedding_ctx_length": null,
- "max_retries": null,
- "model": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "openai_api_type": null,
- "openai_api_version": null,
- "openai_organization": null,
- "openai_proxy": null,
- "request_timeout": null,
- "show_progress_bar": null,
- "skip_empty": null,
- "tiktoken_enable": null,
- "tiktoken_model_name": null
- },
- "output_types": [
- "Embeddings"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "OpenAIEmbeddings-9TPjc"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "positionAbsolute": {
- "x": 2814.0402191223047,
- "y": 1955.9268168273086
- },
- "dragging": false
- }
- ],
- "edges": [
- {
- "source": "TextOutput-BDknO",
- "target": "Prompt-xeI6K",
- "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}",
- "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "id": "reactflow__edge-TextOutput-BDknO{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}-Prompt-xeI6K{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "context",
- "id": "Prompt-xeI6K",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "TextOutput",
- "id": "TextOutput-BDknO"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": ["Text"],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-xeI6K",
+ "description": "Create a prompt template with dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 477,
+ "positionAbsolute": {
+ "x": 2969.0261961391298,
+ "y": 442.1613649809069
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-Q39I8",
+ "type": "genericNode",
+ "position": {
+ "x": 3887.2073667611485,
+ "y": 588.4801225794856
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "In case of Message being a Record, this template will be used to convert it to text.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "AI",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "ChatInput-yxMKE",
- "target": "Prompt-xeI6K",
- "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}",
- "targetHandle": "{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-Prompt-xeI6K{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "question",
- "id": "Prompt-xeI6K",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-yxMKE"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "description": "Display a chat message in the Playground.",
+ "icon": "ChatOutput",
+ "base_classes": ["object", "Text", "Record", "str"],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null,
+ "record_template": null
},
- {
- "source": "Prompt-xeI6K",
- "target": "OpenAIModel-EjXlN",
- "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}",
- "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "id": "reactflow__edge-Prompt-xeI6K{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}-OpenAIModel-EjXlN{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-EjXlN",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "Prompt",
- "id": "Prompt-xeI6K"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-Q39I8"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 3887.2073667611485,
+ "y": 588.4801225794856
+ },
+ "dragging": false
+ },
+ {
+ "id": "File-t0a6a",
+ "type": "genericNode",
+ "position": {
+ "x": 2257.233450682836,
+ "y": 1747.5389618367233
+ },
+ "data": {
+ "type": "File",
+ "node": {
+ "template": {
+ "path": {
+ "type": "file",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [
+ ".txt",
+ ".md",
+ ".mdx",
+ ".csv",
+ ".json",
+ ".yaml",
+ ".yml",
+ ".xml",
+ ".html",
+ ".htm",
+ ".pdf",
+ ".docx"
+ ],
+ "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf",
+ "password": false,
+ "name": "path",
+ "display_name": "Path",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx",
+ "load_from_db": false,
+ "title_case": false,
+ "value": ""
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "silent_errors": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "silent_errors",
+ "display_name": "Silent Errors",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If true, errors will not raise an exception.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIModel-EjXlN",
- "target": "ChatOutput-Q39I8",
- "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}",
- "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "id": "reactflow__edge-OpenAIModel-EjXlN{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}-ChatOutput-Q39I8{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-Q39I8",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-EjXlN"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "description": "A generic file loader.",
+ "icon": "file-text",
+ "base_classes": ["Record"],
+ "display_name": "File",
+ "documentation": "",
+ "custom_fields": {
+ "path": null,
+ "silent_errors": null
},
- {
- "source": "File-t0a6a",
- "target": "RecursiveCharacterTextSplitter-tR9QM",
- "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}",
- "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}",
- "id": "reactflow__edge-File-t0a6a{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}-RecursiveCharacterTextSplitter-tR9QM{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}",
- "data": {
- "targetHandle": {
- "fieldName": "inputs",
- "id": "RecursiveCharacterTextSplitter-tR9QM",
- "inputTypes": [
- "Document",
- "Record"
- ],
- "type": "Document"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "File",
- "id": "File-t0a6a"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "File-t0a6a"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 281,
+ "positionAbsolute": {
+ "x": 2257.233450682836,
+ "y": 1747.5389618367233
+ },
+ "dragging": false
+ },
+ {
+ "id": "RecursiveCharacterTextSplitter-tR9QM",
+ "type": "genericNode",
+ "position": {
+ "x": 2791.013514133929,
+ "y": 1462.9588953494142
+ },
+ "data": {
+ "type": "RecursiveCharacterTextSplitter",
+ "node": {
+ "template": {
+ "inputs": {
+ "type": "Document",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "inputs",
+ "display_name": "Input",
+ "advanced": false,
+ "input_types": ["Document", "Record"],
+ "dynamic": false,
+ "info": "The texts to split.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "chunk_overlap": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 200,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_overlap",
+ "display_name": "Chunk Overlap",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The amount of overlap between chunks.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The maximum length of each chunk.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = 
None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "separators": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "separators",
+ "display_name": "Separators",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": [""]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIEmbeddings-ZlOk1",
- "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}",
- "target": "AstraDBSearch-41nRz",
- "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
- "data": {
- "targetHandle": {
- "fieldName": "embedding",
- "id": "AstraDBSearch-41nRz",
- "inputTypes": null,
- "type": "Embeddings"
- },
- "sourceHandle": {
- "baseClasses": [
- "Embeddings"
- ],
- "dataType": "OpenAIEmbeddings",
- "id": "OpenAIEmbeddings-ZlOk1"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}-AstraDBSearch-41nRz{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}"
+ "description": "Split text into chunks of a specified length.",
+ "base_classes": ["Record"],
+ "display_name": "Recursive Character Text Splitter",
+ "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter",
+ "custom_fields": {
+ "inputs": null,
+ "separators": null,
+ "chunk_size": null,
+ "chunk_overlap": null
},
- {
- "source": "ChatInput-yxMKE",
- "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}",
- "target": "AstraDBSearch-41nRz",
- "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "AstraDBSearch-41nRz",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-yxMKE"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-AstraDBSearch-41nRz{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}"
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "RecursiveCharacterTextSplitter-tR9QM"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 501,
+ "positionAbsolute": {
+ "x": 2791.013514133929,
+ "y": 1462.9588953494142
+ },
+ "dragging": false
+ },
+ {
+ "id": "AstraDBSearch-41nRz",
+ "type": "genericNode",
+ "position": {
+ "x": 1723.976434815103,
+ "y": 277.03317407245913
+ },
+ "data": {
+ "type": "AstraDBSearch",
+ "node": {
+ "template": {
+ "embedding": {
+ "type": "Embeddings",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding",
+ "display_name": "Embedding",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Embedding to use",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input Value",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Input value to search",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "api_endpoint": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "api_endpoint",
+ "display_name": "API Endpoint",
+ "advanced": false,
+ "dynamic": false,
+ "info": "API endpoint URL for the Astra DB service.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "ASTRA_DB_API_ENDPOINT"
+ },
+ "batch_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "batch_size",
+ "display_name": "Batch Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional number of records to process in a single batch.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_delete_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_delete_concurrency",
+ "display_name": "Bulk Delete Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk delete operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_batch_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_batch_concurrency",
+ "display_name": "Bulk Insert Batch Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_overwrite_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_overwrite_concurrency",
+ "display_name": "Bulk Insert Overwrite Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": 
True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n 
bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_indexing_policy": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_indexing_policy",
+ "display_name": "Collection Indexing Policy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional dictionary defining the indexing policy for the collection.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_name",
+ "display_name": "Collection Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The name of the collection within Astra DB where the vectors will be stored.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "langflow"
+ },
+ "metadata_indexing_exclude": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_exclude",
+ "display_name": "Metadata Indexing Exclude",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to exclude from the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "metadata_indexing_include": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_include",
+ "display_name": "Metadata Indexing Include",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to include in the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "metric": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metric",
+ "display_name": "Metric",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional distance metric for vector comparisons in the vector store.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "namespace": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "namespace",
+ "display_name": "Namespace",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional namespace within Astra DB to use for the collection.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "number_of_results": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 4,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "number_of_results",
+ "display_name": "Number of Results",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Number of results to return.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "pre_delete_collection": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "pre_delete_collection",
+ "display_name": "Pre Delete Collection",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "search_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Similarity",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Similarity", "MMR"],
+ "name": "search_type",
+ "display_name": "Search Type",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "setup_mode": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Sync",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Sync", "Async", "Off"],
+ "name": "setup_mode",
+ "display_name": "Setup Mode",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "token": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "token",
+ "display_name": "Token",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Authentication token for accessing Astra DB.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "ASTRA_DB_APPLICATION_TOKEN"
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "RecursiveCharacterTextSplitter-tR9QM",
- "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}",
- "target": "AstraDB-eUCSS",
- "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}",
- "data": {
- "targetHandle": {
- "fieldName": "inputs",
- "id": "AstraDB-eUCSS",
- "inputTypes": null,
- "type": "Record"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "RecursiveCharacterTextSplitter",
- "id": "RecursiveCharacterTextSplitter-tR9QM"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}-AstraDB-eUCSS{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}",
- "selected": false
+ "description": "Searches an existing Astra DB Vector Store.",
+ "icon": "AstraDB",
+ "base_classes": ["Record"],
+ "display_name": "Astra DB Search",
+ "documentation": "",
+ "custom_fields": {
+ "embedding": null,
+ "collection_name": null,
+ "input_value": null,
+ "token": null,
+ "api_endpoint": null,
+ "search_type": null,
+ "number_of_results": null,
+ "namespace": null,
+ "metric": null,
+ "batch_size": null,
+ "bulk_insert_batch_concurrency": null,
+ "bulk_insert_overwrite_concurrency": null,
+ "bulk_delete_concurrency": null,
+ "setup_mode": null,
+ "pre_delete_collection": null,
+ "metadata_indexing_include": null,
+ "metadata_indexing_exclude": null,
+ "collection_indexing_policy": null
},
- {
- "source": "OpenAIEmbeddings-9TPjc",
- "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}",
- "target": "AstraDB-eUCSS",
- "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
- "data": {
- "targetHandle": {
- "fieldName": "embedding",
- "id": "AstraDB-eUCSS",
- "inputTypes": null,
- "type": "Embeddings"
- },
- "sourceHandle": {
- "baseClasses": [
- "Embeddings"
- ],
- "dataType": "OpenAIEmbeddings",
- "id": "OpenAIEmbeddings-9TPjc"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}-AstraDB-eUCSS{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}",
- "selected": false
- },
- {
- "source": "AstraDBSearch-41nRz",
- "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}",
- "target": "TextOutput-BDknO",
- "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "TextOutput-BDknO",
- "inputTypes": [
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "AstraDBSearch",
- "id": "AstraDBSearch-41nRz"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-AstraDBSearch-41nRz{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}-TextOutput-BDknO{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}"
- }
- ],
- "viewport": {
- "x": -259.6782520315529,
- "y": 90.3428735006047,
- "zoom": 0.2687057134854984
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "token",
+ "api_endpoint",
+ "collection_name",
+ "input_value",
+ "embedding"
+ ],
+ "beta": false
+ },
+ "id": "AstraDBSearch-41nRz"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 713,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 1723.976434815103,
+ "y": 277.03317407245913
}
- },
- "description": "Visit https://pre-release.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.",
- "name": "Vector Store RAG",
- "last_tested_version": "1.0.0a0",
- "is_component": false
-}
\ No newline at end of file
+ },
+ {
+ "id": "AstraDB-eUCSS",
+ "type": "genericNode",
+ "position": {
+ "x": 3372.04958055989,
+ "y": 1611.0742035495277
+ },
+ "data": {
+ "type": "AstraDB",
+ "node": {
+ "template": {
+ "embedding": {
+ "type": "Embeddings",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding",
+ "display_name": "Embedding",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Embedding to use",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "inputs": {
+ "type": "Record",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "inputs",
+ "display_name": "Inputs",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Optional list of records to be processed and stored in the vector store.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "api_endpoint": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "api_endpoint",
+ "display_name": "API Endpoint",
+ "advanced": false,
+ "dynamic": false,
+ "info": "API endpoint URL for the Astra DB service.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "ASTRA_DB_API_ENDPOINT"
+ },
+ "batch_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "batch_size",
+ "display_name": "Batch Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional number of records to process in a single batch.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_delete_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_delete_concurrency",
+ "display_name": "Bulk Delete Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk delete operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_batch_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_batch_concurrency",
+ "display_name": "Bulk Insert Batch Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_overwrite_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_overwrite_concurrency",
+ "display_name": "Bulk Insert Overwrite Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert 
Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = 
None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_indexing_policy": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_indexing_policy",
+ "display_name": "Collection Indexing Policy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional dictionary defining the indexing policy for the collection.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_name",
+ "display_name": "Collection Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The name of the collection within Astra DB where the vectors will be stored.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "langflow"
+ },
+ "metadata_indexing_exclude": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_exclude",
+ "display_name": "Metadata Indexing Exclude",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to exclude from the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "metadata_indexing_include": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_include",
+ "display_name": "Metadata Indexing Include",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to include in the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "metric": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metric",
+ "display_name": "Metric",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional distance metric for vector comparisons in the vector store.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "namespace": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "namespace",
+ "display_name": "Namespace",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional namespace within Astra DB to use for the collection.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "pre_delete_collection": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "pre_delete_collection",
+ "display_name": "Pre Delete Collection",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "setup_mode": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Async",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Sync", "Async", "Off"],
+ "name": "setup_mode",
+ "display_name": "Setup Mode",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "token": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "token",
+ "display_name": "Token",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Authentication token for accessing Astra DB.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "ASTRA_DB_APPLICATION_TOKEN"
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Builds or loads an Astra DB Vector Store.",
+ "icon": "AstraDB",
+ "base_classes": ["VectorStore"],
+ "display_name": "Astra DB",
+ "documentation": "",
+ "custom_fields": {
+ "embedding": null,
+ "token": null,
+ "api_endpoint": null,
+ "collection_name": null,
+ "inputs": null,
+ "namespace": null,
+ "metric": null,
+ "batch_size": null,
+ "bulk_insert_batch_concurrency": null,
+ "bulk_insert_overwrite_concurrency": null,
+ "bulk_delete_concurrency": null,
+ "setup_mode": null,
+ "pre_delete_collection": null,
+ "metadata_indexing_include": null,
+ "metadata_indexing_exclude": null,
+ "collection_indexing_policy": null
+ },
+ "output_types": ["VectorStore"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "token",
+ "api_endpoint",
+ "collection_name",
+ "inputs",
+ "embedding"
+ ],
+ "beta": false
+ },
+ "id": "AstraDB-eUCSS"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 573,
+ "positionAbsolute": {
+ "x": 3372.04958055989,
+ "y": 1611.0742035495277
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIEmbeddings-9TPjc",
+ "type": "genericNode",
+ "position": {
+ "x": 2814.0402191223047,
+ "y": 1955.9268168273086
+ },
+ "data": {
+ "type": "OpenAIEmbeddings",
+ "node": {
+ "template": {
+ "allowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "allowed_special",
+ "display_name": "Allowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "client": {
+ "type": "Any",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "client",
+ "display_name": "Client",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n 
embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_headers": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_headers",
+ "display_name": "Default Headers",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_query": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_query",
+ "display_name": "Default Query",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "deployment": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "deployment",
+ "display_name": "Deployment",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "disallowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": ["all"],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "disallowed_special",
+ "display_name": "Disallowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "embedding_ctx_length": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 8191,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding_ctx_length",
+ "display_name": "Embedding Context Length",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_retries": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 6,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_retries",
+ "display_name": "Max Retries",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "text-embedding-3-small",
+ "text-embedding-3-large",
+ "text-embedding-ada-002"
+ ],
+ "name": "model",
+ "display_name": "Model",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": ""
+ },
+ "openai_api_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_type",
+ "display_name": "OpenAI API Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_version": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_version",
+ "display_name": "OpenAI API Version",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_organization": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_organization",
+ "display_name": "OpenAI Organization",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_proxy": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_proxy",
+ "display_name": "OpenAI Proxy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "request_timeout": {
+ "type": "float",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "request_timeout",
+ "display_name": "Request Timeout",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "show_progress_bar": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "show_progress_bar",
+ "display_name": "Show Progress Bar",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "skip_empty": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "skip_empty",
+ "display_name": "Skip Empty",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_enable": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_enable",
+ "display_name": "TikToken Enable",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_model_name",
+ "display_name": "TikToken Model Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generate embeddings using OpenAI models.",
+ "base_classes": ["Embeddings"],
+ "display_name": "OpenAI Embeddings",
+ "documentation": "",
+ "custom_fields": {
+ "openai_api_key": null,
+ "default_headers": null,
+ "default_query": null,
+ "allowed_special": null,
+ "disallowed_special": null,
+ "chunk_size": null,
+ "client": null,
+ "deployment": null,
+ "embedding_ctx_length": null,
+ "max_retries": null,
+ "model": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "openai_api_type": null,
+ "openai_api_version": null,
+ "openai_organization": null,
+ "openai_proxy": null,
+ "request_timeout": null,
+ "show_progress_bar": null,
+ "skip_empty": null,
+ "tiktoken_enable": null,
+ "tiktoken_model_name": null
+ },
+ "output_types": ["Embeddings"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "OpenAIEmbeddings-9TPjc"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 2814.0402191223047,
+ "y": 1955.9268168273086
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "TextOutput-BDknO",
+ "target": "Prompt-xeI6K",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-TextOutput-BDknO{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "context",
+ "id": "Prompt-xeI6K",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "TextOutput",
+ "id": "TextOutput-BDknO"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "ChatInput-yxMKE",
+ "target": "Prompt-xeI6K",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "question",
+ "id": "Prompt-xeI6K",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Text", "str", "object", "Record"],
+ "dataType": "ChatInput",
+ "id": "ChatInput-yxMKE"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "Prompt-xeI6K",
+ "target": "OpenAIModel-EjXlN",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-Prompt-xeI6K{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}-OpenAIModel-EjXlN{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-EjXlN",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "Prompt",
+ "id": "Prompt-xeI6K"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIModel-EjXlN",
+ "target": "ChatOutput-Q39I8",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-OpenAIModel-EjXlN{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}-ChatOutput-Q39I8{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-Q39I8",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-EjXlN"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "File-t0a6a",
+ "target": "RecursiveCharacterTextSplitter-tR9QM",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}",
+ "id": "reactflow__edge-File-t0a6a{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}-RecursiveCharacterTextSplitter-tR9QM{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "inputs",
+ "id": "RecursiveCharacterTextSplitter-tR9QM",
+ "inputTypes": ["Document", "Record"],
+ "type": "Document"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "File",
+ "id": "File-t0a6a"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIEmbeddings-ZlOk1",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}",
+ "target": "AstraDBSearch-41nRz",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "embedding",
+ "id": "AstraDBSearch-41nRz",
+ "inputTypes": null,
+ "type": "Embeddings"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Embeddings"],
+ "dataType": "OpenAIEmbeddings",
+ "id": "OpenAIEmbeddings-ZlOk1"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}"
+ },
+ {
+ "source": "ChatInput-yxMKE",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}",
+ "target": "AstraDBSearch-41nRz",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "AstraDBSearch-41nRz",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Text", "str", "object", "Record"],
+ "dataType": "ChatInput",
+ "id": "ChatInput-yxMKE"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "RecursiveCharacterTextSplitter-tR9QM",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}",
+ "target": "AstraDB-eUCSS",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "inputs",
+ "id": "AstraDB-eUCSS",
+ "inputTypes": null,
+ "type": "Record"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "RecursiveCharacterTextSplitter",
+ "id": "RecursiveCharacterTextSplitter-tR9QM"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}",
+ "selected": false
+ },
+ {
+ "source": "OpenAIEmbeddings-9TPjc",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}",
+ "target": "AstraDB-eUCSS",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "embedding",
+ "id": "AstraDB-eUCSS",
+ "inputTypes": null,
+ "type": "Embeddings"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Embeddings"],
+ "dataType": "OpenAIEmbeddings",
+ "id": "OpenAIEmbeddings-9TPjc"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
+ "selected": false
+ },
+ {
+ "source": "AstraDBSearch-41nRz",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}",
+ "target": "TextOutput-BDknO",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "TextOutput-BDknO",
+ "inputTypes": ["Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "AstraDBSearch",
+ "id": "AstraDBSearch-41nRz"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-AstraDBSearch-41nRz{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}-TextOutput-BDknO{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ }
+ ],
+ "viewport": {
+ "x": -259.6782520315529,
+ "y": 90.3428735006047,
+ "zoom": 0.2687057134854984
+ }
+ },
+ "description": "Visit https://pre-release.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.",
+ "name": "Vector Store RAG",
+ "last_tested_version": "1.0.0a0",
+ "is_component": false
+}
diff --git a/docs/static/img/api-window.png b/docs/static/img/api-window.png
new file mode 100644
index 000000000..47790433f
Binary files /dev/null and b/docs/static/img/api-window.png differ
diff --git a/docs/static/img/chat-input-with-menu.png b/docs/static/img/chat-input-with-menu.png
new file mode 100644
index 000000000..df48a3643
Binary files /dev/null and b/docs/static/img/chat-input-with-menu.png differ
diff --git a/docs/static/img/features.png b/docs/static/img/features.png
deleted file mode 100644
index 0e55c5a04..000000000
Binary files a/docs/static/img/features.png and /dev/null differ
diff --git a/docs/static/img/project-options-menu.png b/docs/static/img/project-options-menu.png
new file mode 100644
index 000000000..ab687c9ac
Binary files /dev/null and b/docs/static/img/project-options-menu.png differ
diff --git a/docs/static/img/single-component.png b/docs/static/img/single-component.png
new file mode 100644
index 000000000..93237f3c9
Binary files /dev/null and b/docs/static/img/single-component.png differ
diff --git a/docs/static/logos/botmessage.svg b/docs/static/logos/botmessage.svg
new file mode 100644
index 000000000..ab468da41
--- /dev/null
+++ b/docs/static/logos/botmessage.svg
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/static/logos/greencheck.svg b/docs/static/logos/greencheck.svg
new file mode 100644
index 000000000..842be95f7
--- /dev/null
+++ b/docs/static/logos/greencheck.svg
@@ -0,0 +1,11 @@
+
+
+
diff --git a/docs/static/logos/playbutton.svg b/docs/static/logos/playbutton.svg
new file mode 100644
index 000000000..978407473
--- /dev/null
+++ b/docs/static/logos/playbutton.svg
@@ -0,0 +1,11 @@
+
+
+
diff --git a/docs/static/videos/langflow_global_variables.mp4 b/docs/static/videos/langflow_global_variables.mp4
new file mode 100644
index 000000000..8be58e779
Binary files /dev/null and b/docs/static/videos/langflow_global_variables.mp4 differ
diff --git a/docs/static/videos/langflow_playground.mp4 b/docs/static/videos/langflow_playground.mp4
new file mode 100644
index 000000000..aa7488c5f
Binary files /dev/null and b/docs/static/videos/langflow_playground.mp4 differ
diff --git a/package-lock.json b/package-lock.json
deleted file mode 100644
index 8c3f329a0..000000000
--- a/package-lock.json
+++ /dev/null
@@ -1,932 +0,0 @@
-{
- "name": "langflow",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "dependencies": {
- "@radix-ui/react-popover": "^1.0.7",
- "cmdk": "^0.2.0"
- }
- },
- "node_modules/@babel/runtime": {
- "version": "7.23.2",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.2.tgz",
- "integrity": "sha512-mM8eg4yl5D6i3lu2QKPuPH4FArvJ8KhTofbE7jwMUv9KX5mBvwPAqnV3MlyBNqdp9RyRKP6Yck8TrfYrPvX3bg==",
- "dependencies": {
- "regenerator-runtime": "^0.14.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@floating-ui/core": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.5.0.tgz",
- "integrity": "sha512-kK1h4m36DQ0UHGj5Ah4db7R0rHemTqqO0QLvUqi1/mUUp3LuAWbWxdxSIf/XsnH9VS6rRVPLJCncjRzUvyCLXg==",
- "dependencies": {
- "@floating-ui/utils": "^0.1.3"
- }
- },
- "node_modules/@floating-ui/dom": {
- "version": "1.5.3",
- "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.5.3.tgz",
- "integrity": "sha512-ClAbQnEqJAKCJOEbbLo5IUlZHkNszqhuxS4fHAVxRPXPya6Ysf2G8KypnYcOTpx6I8xcgF9bbHb6g/2KpbV8qA==",
- "dependencies": {
- "@floating-ui/core": "^1.4.2",
- "@floating-ui/utils": "^0.1.3"
- }
- },
- "node_modules/@floating-ui/react-dom": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.4.tgz",
- "integrity": "sha512-CF8k2rgKeh/49UrnIBs4BdxPUV6vize/Db1d/YbCLyp9GiVZ0BEwf5AiDSxJRCr6yOkGqTFHtmrULxkEfYZ7dQ==",
- "dependencies": {
- "@floating-ui/dom": "^1.5.1"
- },
- "peerDependencies": {
- "react": ">=16.8.0",
- "react-dom": ">=16.8.0"
- }
- },
- "node_modules/@floating-ui/utils": {
- "version": "0.1.6",
- "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.1.6.tgz",
- "integrity": "sha512-OfX7E2oUDYxtBvsuS4e/jSn4Q9Qb6DzgeYtsAdkPZ47znpoNsMgZw0+tVijiv3uGNR6dgNlty6r9rzIzHjtd/A=="
- },
- "node_modules/@radix-ui/primitive": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.1.tgz",
- "integrity": "sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- }
- },
- "node_modules/@radix-ui/react-arrow": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.0.3.tgz",
- "integrity": "sha512-wSP+pHsB/jQRaL6voubsQ/ZlrGBHHrOjmBnr19hxYgtS0WvAFwZhK2WP/YY5yF9uKECCEEDGxuLxq1NBK51wFA==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-primitive": "1.0.3"
- },
- "peerDependencies": {
- "@types/react": "*",
- "@types/react-dom": "*",
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- },
- "@types/react-dom": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-compose-refs": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz",
- "integrity": "sha512-fDSBgd44FKHa1FRMU59qBMPFcl2PZE+2nmqunj+BWFyYYjnhIDWL2ItDs3rrbJDQOtzt5nIebLCQc4QRfz6LJw==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-context": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.1.tgz",
- "integrity": "sha512-ebbrdFoYTcuZ0v4wG5tedGnp9tzcV8awzsxYph7gXUyvnNLuTIcCk1q17JEbnVhXAKG9oX3KtchwiMIAYp9NLg==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-dialog": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.0.tgz",
- "integrity": "sha512-Yn9YU+QlHYLWwV1XfKiqnGVpWYWk6MeBVM6x/bcoyPvxgjQGoeT35482viLPctTMWoMw0PoHgqfSox7Ig+957Q==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/primitive": "1.0.0",
- "@radix-ui/react-compose-refs": "1.0.0",
- "@radix-ui/react-context": "1.0.0",
- "@radix-ui/react-dismissable-layer": "1.0.0",
- "@radix-ui/react-focus-guards": "1.0.0",
- "@radix-ui/react-focus-scope": "1.0.0",
- "@radix-ui/react-id": "1.0.0",
- "@radix-ui/react-portal": "1.0.0",
- "@radix-ui/react-presence": "1.0.0",
- "@radix-ui/react-primitive": "1.0.0",
- "@radix-ui/react-slot": "1.0.0",
- "@radix-ui/react-use-controllable-state": "1.0.0",
- "aria-hidden": "^1.1.1",
- "react-remove-scroll": "2.5.4"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/primitive": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.0.tgz",
- "integrity": "sha512-3e7rn8FDMin4CgeL7Z/49smCA3rFYY3Ha2rUQ7HRWFadS5iCRw08ZgVT1LaNTCNqgvrUiyczLflrVrF0SRQtNA==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-compose-refs": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.0.tgz",
- "integrity": "sha512-0KaSv6sx787/hK3eF53iOkiSLwAGlFMx5lotrqD2pTjB18KbybKoEIgkNZTKC60YECDQTKGTRcDBILwZVqVKvA==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-context": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.0.tgz",
- "integrity": "sha512-1pVM9RfOQ+n/N5PJK33kRSKsr1glNxomxONs5c49MliinBY6Yw2Q995qfBUUo0/Mbg05B/sGA0gkgPI7kmSHBg==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-dismissable-layer": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.0.tgz",
- "integrity": "sha512-n7kDRfx+LB1zLueRDvZ1Pd0bxdJWDUZNQ/GWoxDn2prnuJKRdxsjulejX/ePkOsLi2tTm6P24mDqlMSgQpsT6g==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/primitive": "1.0.0",
- "@radix-ui/react-compose-refs": "1.0.0",
- "@radix-ui/react-primitive": "1.0.0",
- "@radix-ui/react-use-callback-ref": "1.0.0",
- "@radix-ui/react-use-escape-keydown": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-focus-guards": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.0.tgz",
- "integrity": "sha512-UagjDk4ijOAnGu4WMUPj9ahi7/zJJqNZ9ZAiGPp7waUWJO0O1aWXi/udPphI0IUjvrhBsZJGSN66dR2dsueLWQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-focus-scope": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.0.tgz",
- "integrity": "sha512-C4SWtsULLGf/2L4oGeIHlvWQx7Rf+7cX/vKOAD2dXW0A1b5QXwi3wWeaEgW+wn+SEVrraMUk05vLU9fZZz5HbQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-compose-refs": "1.0.0",
- "@radix-ui/react-primitive": "1.0.0",
- "@radix-ui/react-use-callback-ref": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-id": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.0.0.tgz",
- "integrity": "sha512-Q6iAB/U7Tq3NTolBBQbHTgclPmGWE3OlktGGqrClPozSw4vkQ1DfQAOtzgRPecKsMdJINE05iaoDUG8tRzCBjw==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-use-layout-effect": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-portal": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.0.tgz",
- "integrity": "sha512-a8qyFO/Xb99d8wQdu4o7qnigNjTPG123uADNecz0eX4usnQEj7o+cG4ZX4zkqq98NYekT7UoEQIjxBNWIFuqTA==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-primitive": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-presence": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.0.0.tgz",
- "integrity": "sha512-A+6XEvN01NfVWiKu38ybawfHsBjWum42MRPnEuqPsBZ4eV7e/7K321B5VgYMPv3Xx5An6o1/l9ZuDBgmcmWK3w==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-compose-refs": "1.0.0",
- "@radix-ui/react-use-layout-effect": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-primitive": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-1.0.0.tgz",
- "integrity": "sha512-EyXe6mnRlHZ8b6f4ilTDrXmkLShICIuOTTj0GX4w1rp+wSxf3+TD05u1UOITC8VsJ2a9nwHvdXtOXEOl0Cw/zQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-slot": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-slot": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.0.tgz",
- "integrity": "sha512-3mrKauI/tWXo1Ll+gN5dHcxDPdm/Df1ufcDLCecn+pnCIVcdWE7CujXo8QaXOWRJyZyQWWbpB8eFwHzWXlv5mQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-compose-refs": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-callback-ref": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.0.tgz",
- "integrity": "sha512-GZtyzoHz95Rhs6S63D2t/eqvdFCm7I+yHMLVQheKM7nBD8mbZIt+ct1jz4536MDnaOGKIxynJ8eHTkVGVVkoTg==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-controllable-state": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.0.tgz",
- "integrity": "sha512-FohDoZvk3mEXh9AWAVyRTYR4Sq7/gavuofglmiXB2g1aKyboUD4YtgWxKj8O5n+Uak52gXQ4wKz5IFST4vtJHg==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-use-callback-ref": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-escape-keydown": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.0.tgz",
- "integrity": "sha512-JwfBCUIfhXRxKExgIqGa4CQsiMemo1Xt0W/B4ei3fpzpvPENKpMKQ8mZSB6Acj3ebrAEgi2xiQvcI1PAAodvyg==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-use-callback-ref": "1.0.0"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-layout-effect": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.0.tgz",
- "integrity": "sha512-6Tpkq+R6LOlmQb1R5NNETLG0B4YP0wc+klfXafpUCj6JGyaUc8il7/kUZ7m59rGbXGczE9Bs+iz2qloqsZBduQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "react": "^16.8 || ^17.0 || ^18.0"
- }
- },
- "node_modules/@radix-ui/react-dialog/node_modules/react-remove-scroll": {
- "version": "2.5.4",
- "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.4.tgz",
- "integrity": "sha512-xGVKJJr0SJGQVirVFAUZ2k1QLyO6m+2fy0l8Qawbp5Jgrv3DeLalrfMNBFSlmz5kriGGzsVBtGVnf4pTKIhhWA==",
- "dependencies": {
- "react-remove-scroll-bar": "^2.3.3",
- "react-style-singleton": "^2.2.1",
- "tslib": "^2.1.0",
- "use-callback-ref": "^1.3.0",
- "use-sidecar": "^1.1.2"
- },
- "engines": {
- "node": ">=10"
- },
- "peerDependencies": {
- "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0",
- "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-dismissable-layer": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz",
- "integrity": "sha512-aJeDjQhywg9LBu2t/At58hCvr7pEm0o2Ke1x33B+MhjNmmZ17sy4KImo0KPLgsnc/zN7GPdce8Cnn0SWvwZO7g==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/primitive": "1.0.1",
- "@radix-ui/react-compose-refs": "1.0.1",
- "@radix-ui/react-primitive": "1.0.3",
- "@radix-ui/react-use-callback-ref": "1.0.1",
- "@radix-ui/react-use-escape-keydown": "1.0.3"
- },
- "peerDependencies": {
- "@types/react": "*",
- "@types/react-dom": "*",
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- },
- "@types/react-dom": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-focus-guards": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.1.tgz",
- "integrity": "sha512-Rect2dWbQ8waGzhMavsIbmSVCgYxkXLxxR3ZvCX79JOglzdEy4JXMb98lq4hPxUbLr77nP0UOGf4rcMU+s1pUA==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-focus-scope": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.4.tgz",
- "integrity": "sha512-sL04Mgvf+FmyvZeYfNu1EPAaaxD+aw7cYeIB9L9Fvq8+urhltTRaEo5ysKOpHuKPclsZcSUMKlN05x4u+CINpA==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-compose-refs": "1.0.1",
- "@radix-ui/react-primitive": "1.0.3",
- "@radix-ui/react-use-callback-ref": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "@types/react-dom": "*",
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- },
- "@types/react-dom": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-id": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.0.1.tgz",
- "integrity": "sha512-tI7sT/kqYp8p96yGWY1OAnLHrqDgzHefRBKQ2YAkBS5ja7QLcZ9Z/uY7bEjPUatf8RomoXM8/1sMj1IJaE5UzQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-use-layout-effect": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-popover": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.7.tgz",
- "integrity": "sha512-shtvVnlsxT6faMnK/a7n0wptwBD23xc1Z5mdrtKLwVEfsEMXodS0r5s0/g5P0hX//EKYZS2sxUjqfzlg52ZSnQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/primitive": "1.0.1",
- "@radix-ui/react-compose-refs": "1.0.1",
- "@radix-ui/react-context": "1.0.1",
- "@radix-ui/react-dismissable-layer": "1.0.5",
- "@radix-ui/react-focus-guards": "1.0.1",
- "@radix-ui/react-focus-scope": "1.0.4",
- "@radix-ui/react-id": "1.0.1",
- "@radix-ui/react-popper": "1.1.3",
- "@radix-ui/react-portal": "1.0.4",
- "@radix-ui/react-presence": "1.0.1",
- "@radix-ui/react-primitive": "1.0.3",
- "@radix-ui/react-slot": "1.0.2",
- "@radix-ui/react-use-controllable-state": "1.0.1",
- "aria-hidden": "^1.1.1",
- "react-remove-scroll": "2.5.5"
- },
- "peerDependencies": {
- "@types/react": "*",
- "@types/react-dom": "*",
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- },
- "@types/react-dom": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-popper": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.3.tgz",
- "integrity": "sha512-cKpopj/5RHZWjrbF2846jBNacjQVwkP068DfmgrNJXpvVWrOvlAmE9xSiy5OqeE+Gi8D9fP+oDhUnPqNMY8/5w==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@floating-ui/react-dom": "^2.0.0",
- "@radix-ui/react-arrow": "1.0.3",
- "@radix-ui/react-compose-refs": "1.0.1",
- "@radix-ui/react-context": "1.0.1",
- "@radix-ui/react-primitive": "1.0.3",
- "@radix-ui/react-use-callback-ref": "1.0.1",
- "@radix-ui/react-use-layout-effect": "1.0.1",
- "@radix-ui/react-use-rect": "1.0.1",
- "@radix-ui/react-use-size": "1.0.1",
- "@radix-ui/rect": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "@types/react-dom": "*",
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- },
- "@types/react-dom": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-portal": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.4.tgz",
- "integrity": "sha512-Qki+C/EuGUVCQTOTD5vzJzJuMUlewbzuKyUy+/iHM2uwGiru9gZeBJtHAPKAEkB5KWGi9mP/CHKcY0wt1aW45Q==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-primitive": "1.0.3"
- },
- "peerDependencies": {
- "@types/react": "*",
- "@types/react-dom": "*",
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- },
- "@types/react-dom": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-presence": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.0.1.tgz",
- "integrity": "sha512-UXLW4UAbIY5ZjcvzjfRFo5gxva8QirC9hF7wRE4U5gz+TP0DbRk+//qyuAQ1McDxBt1xNMBTaciFGvEmJvAZCg==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-compose-refs": "1.0.1",
- "@radix-ui/react-use-layout-effect": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "@types/react-dom": "*",
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- },
- "@types/react-dom": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-primitive": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-1.0.3.tgz",
- "integrity": "sha512-yi58uVyoAcK/Nq1inRY56ZSjKypBNKTa/1mcL8qdl6oJeEaDbOldlzrGn7P6Q3Id5d+SYNGc5AJgc4vGhjs5+g==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-slot": "1.0.2"
- },
- "peerDependencies": {
- "@types/react": "*",
- "@types/react-dom": "*",
- "react": "^16.8 || ^17.0 || ^18.0",
- "react-dom": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- },
- "@types/react-dom": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-slot": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.2.tgz",
- "integrity": "sha512-YeTpuq4deV+6DusvVUW4ivBgnkHwECUu0BiN43L5UCDFgdhsRUWAghhTF5MbvNTPzmiFOx90asDSUjWuCNapwg==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-compose-refs": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-use-callback-ref": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.1.tgz",
- "integrity": "sha512-D94LjX4Sp0xJFVaoQOd3OO9k7tpBYNOXdVhkltUbGv2Qb9OXdrg/CpsjlZv7ia14Sylv398LswWBVVu5nqKzAQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-use-controllable-state": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.1.tgz",
- "integrity": "sha512-Svl5GY5FQeN758fWKrjM6Qb7asvXeiZltlT4U2gVfl8Gx5UAv2sMR0LWo8yhsIZh2oQ0eFdZ59aoOOMV7b47VA==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-use-callback-ref": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-use-escape-keydown": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.3.tgz",
- "integrity": "sha512-vyL82j40hcFicA+M4Ex7hVkB9vHgSse1ZWomAqV2Je3RleKGO5iM8KMOEtfoSB0PnIelMd2lATjTGMYqN5ylTg==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-use-callback-ref": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-use-layout-effect": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.1.tgz",
- "integrity": "sha512-v/5RegiJWYdoCvMnITBkNNx6bCj20fiaJnWtRkU18yITptraXjffz5Qbn05uOiQnOvi+dbkznkoaMltz1GnszQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-use-rect": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.0.1.tgz",
- "integrity": "sha512-Cq5DLuSiuYVKNU8orzJMbl15TXilTnJKUCltMVQg53BQOF1/C5toAaGrowkgksdBQ9H+SRL23g0HDmg9tvmxXw==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/rect": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/react-use-size": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.0.1.tgz",
- "integrity": "sha512-ibay+VqrgcaI6veAojjofPATwledXiSmX+C0KrBk/xgpX9rBzPV3OsfwlhQdUOFbh+LKQorLYT+xTXW9V8yd0g==",
- "dependencies": {
- "@babel/runtime": "^7.13.10",
- "@radix-ui/react-use-layout-effect": "1.0.1"
- },
- "peerDependencies": {
- "@types/react": "*",
- "react": "^16.8 || ^17.0 || ^18.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@radix-ui/rect": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.0.1.tgz",
- "integrity": "sha512-fyrgCaedtvMg9NK3en0pnOYJdtfwxUcNolezkNPUsoX57X8oQk+NkqcvzHXD2uKNij6GXmWU9NDru2IWjrO4BQ==",
- "dependencies": {
- "@babel/runtime": "^7.13.10"
- }
- },
- "node_modules/aria-hidden": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.3.tgz",
- "integrity": "sha512-xcLxITLe2HYa1cnYnwCjkOO1PqUHQpozB8x9AR0OgWN2woOBi5kSDVxKfd0b7sb1hw5qFeJhXm9H1nu3xSfLeQ==",
- "dependencies": {
- "tslib": "^2.0.0"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/cmdk": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/cmdk/-/cmdk-0.2.0.tgz",
- "integrity": "sha512-JQpKvEOb86SnvMZbYaFKYhvzFntWBeSZdyii0rZPhKJj9uwJBxu4DaVYDrRN7r3mPop56oPhRw+JYWTKs66TYw==",
- "dependencies": {
- "@radix-ui/react-dialog": "1.0.0",
- "command-score": "0.1.2"
- },
- "peerDependencies": {
- "react": "^18.0.0",
- "react-dom": "^18.0.0"
- }
- },
- "node_modules/command-score": {
- "version": "0.1.2",
- "resolved": "https://registry.npmjs.org/command-score/-/command-score-0.1.2.tgz",
- "integrity": "sha512-VtDvQpIJBvBatnONUsPzXYFVKQQAhuf3XTNOAsdBxCNO/QCtUUd8LSgjn0GVarBkCad6aJCZfXgrjYbl/KRr7w=="
- },
- "node_modules/detect-node-es": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz",
- "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ=="
- },
- "node_modules/get-nonce": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz",
- "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/invariant": {
- "version": "2.2.4",
- "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz",
- "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==",
- "dependencies": {
- "loose-envify": "^1.0.0"
- }
- },
- "node_modules/js-tokens": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
- "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
- },
- "node_modules/loose-envify": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
- "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
- "dependencies": {
- "js-tokens": "^3.0.0 || ^4.0.0"
- },
- "bin": {
- "loose-envify": "cli.js"
- }
- },
- "node_modules/react": {
- "version": "18.2.0",
- "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz",
- "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==",
- "peer": true,
- "dependencies": {
- "loose-envify": "^1.1.0"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/react-dom": {
- "version": "18.2.0",
- "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz",
- "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==",
- "peer": true,
- "dependencies": {
- "loose-envify": "^1.1.0",
- "scheduler": "^0.23.0"
- },
- "peerDependencies": {
- "react": "^18.2.0"
- }
- },
- "node_modules/react-remove-scroll": {
- "version": "2.5.5",
- "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz",
- "integrity": "sha512-ImKhrzJJsyXJfBZ4bzu8Bwpka14c/fQt0k+cyFp/PBhTfyDnU5hjOtM4AG/0AMyy8oKzOTR0lDgJIM7pYXI0kw==",
- "dependencies": {
- "react-remove-scroll-bar": "^2.3.3",
- "react-style-singleton": "^2.2.1",
- "tslib": "^2.1.0",
- "use-callback-ref": "^1.3.0",
- "use-sidecar": "^1.1.2"
- },
- "engines": {
- "node": ">=10"
- },
- "peerDependencies": {
- "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0",
- "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/react-remove-scroll-bar": {
- "version": "2.3.4",
- "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.4.tgz",
- "integrity": "sha512-63C4YQBUt0m6ALadE9XV56hV8BgJWDmmTPY758iIJjfQKt2nYwoUrPk0LXRXcB/yIj82T1/Ixfdpdk68LwIB0A==",
- "dependencies": {
- "react-style-singleton": "^2.2.1",
- "tslib": "^2.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "peerDependencies": {
- "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0",
- "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/react-style-singleton": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz",
- "integrity": "sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==",
- "dependencies": {
- "get-nonce": "^1.0.0",
- "invariant": "^2.2.4",
- "tslib": "^2.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "peerDependencies": {
- "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0",
- "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/regenerator-runtime": {
- "version": "0.14.0",
- "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz",
- "integrity": "sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA=="
- },
- "node_modules/scheduler": {
- "version": "0.23.0",
- "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz",
- "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==",
- "peer": true,
- "dependencies": {
- "loose-envify": "^1.1.0"
- }
- },
- "node_modules/tslib": {
- "version": "2.6.2",
- "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz",
- "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q=="
- },
- "node_modules/use-callback-ref": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.0.tgz",
- "integrity": "sha512-3FT9PRuRdbB9HfXhEq35u4oZkvpJ5kuYbpqhCfmiZyReuRgpnhDlbr2ZEnnuS0RrJAPn6l23xjFg9kpDM+Ms7w==",
- "dependencies": {
- "tslib": "^2.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "peerDependencies": {
- "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0",
- "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/use-sidecar": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz",
- "integrity": "sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==",
- "dependencies": {
- "detect-node-es": "^1.1.0",
- "tslib": "^2.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "peerDependencies": {
- "@types/react": "^16.9.0 || ^17.0.0 || ^18.0.0",
- "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- }
- }
-}
diff --git a/package.json b/package.json
deleted file mode 100644
index 33d31f0d1..000000000
--- a/package.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "dependencies": {
- "@radix-ui/react-popover": "^1.0.7",
- "cmdk": "^0.2.0"
- }
-}
diff --git a/poetry.lock b/poetry.lock
index 583f1ee6c..ee02dc69b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -192,13 +192,13 @@ vertex = ["google-auth (>=2,<3)"]
[[package]]
name = "anyio"
-version = "4.3.0"
+version = "4.4.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
- {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
- {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
+ {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
+ {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
]
[package.dependencies]
@@ -242,13 +242,13 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
[[package]]
name = "assemblyai"
-version = "0.23.1"
+version = "0.26.0"
description = "AssemblyAI Python SDK"
optional = false
python-versions = ">=3.8"
files = [
- {file = "assemblyai-0.23.1-py3-none-any.whl", hash = "sha256:2887c7983fa911717cbe37a38d38fcdc8188e62687385b8b6f979546c58354f4"},
- {file = "assemblyai-0.23.1.tar.gz", hash = "sha256:4a3d4d8c4f6c956c6243f0873147ba29da4c6cf5edd6a1b52e6bdaa209526998"},
+ {file = "assemblyai-0.26.0-py3-none-any.whl", hash = "sha256:46689abfe1bf9bccd595f65314aab7deec3b4859630f6882099165862d305421"},
+ {file = "assemblyai-0.26.0.tar.gz", hash = "sha256:7cd7cf3231333e9ea14a130b7a72bf710c66c5d1877bbfd68ab13ff546920e33"},
]
[package.dependencies]
@@ -262,20 +262,22 @@ extras = ["pyaudio (>=0.2.13)"]
[[package]]
name = "astrapy"
-version = "0.7.7"
-description = "AstraPy is a Pythonic SDK for DataStax Astra"
+version = "1.2.0"
+description = "AstraPy is a Pythonic SDK for DataStax Astra and its Data API"
optional = false
-python-versions = ">=3.8.0,<4.0.0"
+python-versions = "<4.0.0,>=3.8.0"
files = [
- {file = "astrapy-0.7.7-py3-none-any.whl", hash = "sha256:e5def4e3c5ceb06dfc996471250dc0c972b729c06336ea4aac006dadfc071a9a"},
- {file = "astrapy-0.7.7.tar.gz", hash = "sha256:4bf81096a0c26cce18a14a34bb5f699649fd7d90b4ec6050f3d7c0274722d769"},
+ {file = "astrapy-1.2.0-py3-none-any.whl", hash = "sha256:5d65242771934c38ebe16f330e9e517968c1437846dabdbe7e48470f7b1782e8"},
+ {file = "astrapy-1.2.0.tar.gz", hash = "sha256:6ce1b421d1ae21fe73373fa36048d8d56c775367886525504f01c48cbb742842"},
]
[package.dependencies]
+bson = ">=0.5.10,<0.6.0"
cassio = ">=0.1.4,<0.2.0"
deprecation = ">=2.1.0,<2.2.0"
httpx = {version = ">=0.25.2,<1", extras = ["http2"]}
toml = ">=0.10.2,<0.11.0"
+uuid6 = ">=2024.1.12,<2024.2.0"
[[package]]
name = "asttokens"
@@ -470,17 +472,17 @@ files = [
[[package]]
name = "boto3"
-version = "1.34.111"
+version = "1.34.113"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "boto3-1.34.111-py3-none-any.whl", hash = "sha256:d6a8e77db316c6e1d9a25f77c795ed1e0a8bc621f863ce26d04b2225d30f2dce"},
- {file = "boto3-1.34.111.tar.gz", hash = "sha256:8f18d212b9199dbbd9d596dd5929685b583ac938c60cceeac2e045c0c5d10323"},
+ {file = "boto3-1.34.113-py3-none-any.whl", hash = "sha256:7e59f0a848be477a4c98a90e7a18a0e284adfb643f7879d2b303c5f493661b7a"},
+ {file = "boto3-1.34.113.tar.gz", hash = "sha256:009cd143509f2ff4c37582c3f45d50f28c95eed68e8a5c36641206bdb597a9ea"},
]
[package.dependencies]
-botocore = ">=1.34.111,<1.35.0"
+botocore = ">=1.34.113,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -489,13 +491,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.34.111"
+version = "1.34.113"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
- {file = "botocore-1.34.111-py3-none-any.whl", hash = "sha256:e10affb7f372d50da957260adf2753a3f153bf90abe6910e11f09d1e443b5515"},
- {file = "botocore-1.34.111.tar.gz", hash = "sha256:0e0fb9b605c46393d5c7c69bd516b36058334bdc8f389e680c6efcf0727f25db"},
+ {file = "botocore-1.34.113-py3-none-any.whl", hash = "sha256:8ca87776450ef41dd25c327eb6e504294230a5756940d68bcfdedc4a7cdeca97"},
+ {file = "botocore-1.34.113.tar.gz", hash = "sha256:449912ba3c4ded64f21d09d428146dd9c05337b2a112e15511bf2c4888faae79"},
]
[package.dependencies]
@@ -598,6 +600,20 @@ files = [
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
]
+[[package]]
+name = "bson"
+version = "0.5.10"
+description = "BSON codec for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "bson-0.5.10.tar.gz", hash = "sha256:d6511b2ab051139a9123c184de1a04227262173ad593429d21e443d6462d6590"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.4.0"
+six = ">=1.9.0"
+
[[package]]
name = "build"
version = "1.2.1"
@@ -989,13 +1005,13 @@ numpy = "*"
[[package]]
name = "chromadb"
-version = "0.4.24"
+version = "0.5.0"
description = "Chroma."
optional = false
python-versions = ">=3.8"
files = [
- {file = "chromadb-0.4.24-py3-none-any.whl", hash = "sha256:3a08e237a4ad28b5d176685bd22429a03717fe09d35022fb230d516108da01da"},
- {file = "chromadb-0.4.24.tar.gz", hash = "sha256:a5c80b4e4ad9b236ed2d4899a5b9e8002b489293f2881cb2cadab5b199ee1c72"},
+ {file = "chromadb-0.5.0-py3-none-any.whl", hash = "sha256:8193dc65c143b61d8faf87f02c44ecfa778d471febd70de517f51c5d88a06009"},
+ {file = "chromadb-0.5.0.tar.gz", hash = "sha256:7954af614a9ff7b2902ddbd0a162f33f7ec0669e2429903905c4f7876d1f766f"},
]
[package.dependencies]
@@ -1016,7 +1032,6 @@ opentelemetry-sdk = ">=1.2.0"
orjson = ">=3.9.12"
overrides = ">=7.3.1"
posthog = ">=2.4.0"
-pulsar-client = ">=3.1.0"
pydantic = ">=1.9"
pypika = ">=0.48.9"
PyYAML = ">=6.0.0"
@@ -1145,13 +1160,13 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
[[package]]
name = "codespell"
-version = "2.2.6"
+version = "2.3.0"
description = "Codespell"
optional = false
python-versions = ">=3.8"
files = [
- {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"},
- {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"},
+ {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"},
+ {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"},
]
[package.extras]
@@ -1162,13 +1177,13 @@ types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency
[[package]]
name = "cohere"
-version = "5.5.0"
+version = "5.5.3"
description = ""
optional = false
python-versions = "<4.0,>=3.8"
files = [
- {file = "cohere-5.5.0-py3-none-any.whl", hash = "sha256:7792e8898c95f2cb955b2d9f23b8602f73f3b698d59f1a1b4896c53809671da0"},
- {file = "cohere-5.5.0.tar.gz", hash = "sha256:00b492ebf8921e83cb2371f2ee36ddf301422daae3024343a87d4316f02b711b"},
+ {file = "cohere-5.5.3-py3-none-any.whl", hash = "sha256:99d20129713a6dae052368b4839773a214592a76bee345b94a4846d00f702da3"},
+ {file = "cohere-5.5.3.tar.gz", hash = "sha256:8c7ebe2f5bf83fee8e55a24a0acdd4b0e94de274fd0ef32b285978289a03e930"},
]
[package.dependencies]
@@ -1295,63 +1310,63 @@ files = [
[[package]]
name = "coverage"
-version = "7.5.1"
+version = "7.5.2"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"},
- {file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"},
- {file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"},
- {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"},
- {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"},
- {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"},
- {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"},
- {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"},
- {file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"},
- {file = "coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"},
- {file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"},
- {file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"},
- {file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"},
- {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"},
- {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"},
- {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"},
- {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"},
- {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"},
- {file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = "sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"},
- {file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"},
- {file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"},
- {file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"},
- {file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"},
- {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"},
- {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"},
- {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"},
- {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"},
- {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"},
- {file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"},
- {file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"},
- {file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"},
- {file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"},
- {file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"},
- {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"},
- {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"},
- {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"},
- {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"},
- {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"},
- {file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"},
- {file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"},
- {file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"},
- {file = "coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"},
- {file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"},
- {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"},
- {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"},
- {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"},
- {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"},
- {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"},
- {file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"},
- {file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"},
- {file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"},
- {file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"},
+ {file = "coverage-7.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:554c7327bf0fd688050348e22db7c8e163fb7219f3ecdd4732d7ed606b417263"},
+ {file = "coverage-7.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d0305e02e40c7cfea5d08d6368576537a74c0eea62b77633179748d3519d6705"},
+ {file = "coverage-7.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:829fb55ad437d757c70d5b1c51cfda9377f31506a0a3f3ac282bc6a387d6a5f1"},
+ {file = "coverage-7.5.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:894b1acded706f1407a662d08e026bfd0ff1e59e9bd32062fea9d862564cfb65"},
+ {file = "coverage-7.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe76d6dee5e4febefa83998b17926df3a04e5089e3d2b1688c74a9157798d7a2"},
+ {file = "coverage-7.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c7ebf2a37e4f5fea3c1a11e1f47cea7d75d0f2d8ef69635ddbd5c927083211fc"},
+ {file = "coverage-7.5.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20e611fc36e1a0fc7bbf957ef9c635c8807d71fbe5643e51b2769b3cc0fb0b51"},
+ {file = "coverage-7.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7c5c5b7ae2763533152880d5b5b451acbc1089ade2336b710a24b2b0f5239d20"},
+ {file = "coverage-7.5.2-cp310-cp310-win32.whl", hash = "sha256:1e4225990a87df898e40ca31c9e830c15c2c53b1d33df592bc8ef314d71f0281"},
+ {file = "coverage-7.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:976cd92d9420e6e2aa6ce6a9d61f2b490e07cb468968adf371546b33b829284b"},
+ {file = "coverage-7.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5997d418c219dcd4dcba64e50671cca849aaf0dac3d7a2eeeb7d651a5bd735b8"},
+ {file = "coverage-7.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec27e93bbf5976f0465e8936f02eb5add99bbe4e4e7b233607e4d7622912d68d"},
+ {file = "coverage-7.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f11f98753800eb1ec872562a398081f6695f91cd01ce39819e36621003ec52a"},
+ {file = "coverage-7.5.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e34680049eecb30b6498784c9637c1c74277dcb1db75649a152f8004fbd6646"},
+ {file = "coverage-7.5.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e12536446ad4527ac8ed91d8a607813085683bcce27af69e3b31cd72b3c5960"},
+ {file = "coverage-7.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3d3f7744b8a8079d69af69d512e5abed4fb473057625588ce126088e50d05493"},
+ {file = "coverage-7.5.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:431a3917e32223fcdb90b79fe60185864a9109631ebc05f6c5aa03781a00b513"},
+ {file = "coverage-7.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a7c6574225f34ce45466f04751d957b5c5e6b69fca9351db017c9249786172ce"},
+ {file = "coverage-7.5.2-cp311-cp311-win32.whl", hash = "sha256:2b144d142ec9987276aeff1326edbc0df8ba4afbd7232f0ca10ad57a115e95b6"},
+ {file = "coverage-7.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:900532713115ac58bc3491b9d2b52704a05ed408ba0918d57fd72c94bc47fba1"},
+ {file = "coverage-7.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9a42970ce74c88bdf144df11c52c5cf4ad610d860de87c0883385a1c9d9fa4ab"},
+ {file = "coverage-7.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26716a1118c6ce2188283b4b60a898c3be29b480acbd0a91446ced4fe4e780d8"},
+ {file = "coverage-7.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60b66b0363c5a2a79fba3d1cd7430c25bbd92c923d031cae906bdcb6e054d9a2"},
+ {file = "coverage-7.5.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d22eba19273b2069e4efeff88c897a26bdc64633cbe0357a198f92dca94268"},
+ {file = "coverage-7.5.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bb5b92a0ab3d22dfdbfe845e2fef92717b067bdf41a5b68c7e3e857c0cff1a4"},
+ {file = "coverage-7.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1aef719b6559b521ae913ddeb38f5048c6d1a3d366865e8b320270b7bc4693c2"},
+ {file = "coverage-7.5.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8809c0ea0e8454f756e3bd5c36d04dddf222989216788a25bfd6724bfcee342c"},
+ {file = "coverage-7.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1acc2e2ef098a1d4bf535758085f508097316d738101a97c3f996bccba963ea5"},
+ {file = "coverage-7.5.2-cp312-cp312-win32.whl", hash = "sha256:97de509043d3f0f2b2cd171bdccf408f175c7f7a99d36d566b1ae4dd84107985"},
+ {file = "coverage-7.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:8941e35a0e991a7a20a1fa3e3182f82abe357211f2c335a9e6007067c3392fcf"},
+ {file = "coverage-7.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5662bf0f6fb6757f5c2d6279c541a5af55a39772c2362ed0920b27e3ce0e21f7"},
+ {file = "coverage-7.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d9c62cff2ffb4c2a95328488fd7aa96a7a4b34873150650fe76b19c08c9c792"},
+ {file = "coverage-7.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74eeaa13e8200ad72fca9c5f37395fb310915cec6f1682b21375e84fd9770e84"},
+ {file = "coverage-7.5.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f29bf497d51a5077994b265e976d78b09d9d0dff6ca5763dbb4804534a5d380"},
+ {file = "coverage-7.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f96aa94739593ae0707eda9813ce363a0a0374a810ae0eced383340fc4a1f73"},
+ {file = "coverage-7.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:51b6cee539168a912b4b3b040e4042b9e2c9a7ad9c8546c09e4eaeff3eacba6b"},
+ {file = "coverage-7.5.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:59a75e6aa5c25b50b5a1499f9718f2edff54257f545718c4fb100f48d570ead4"},
+ {file = "coverage-7.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29da75ce20cb0a26d60e22658dd3230713c6c05a3465dd8ad040ffc991aea318"},
+ {file = "coverage-7.5.2-cp38-cp38-win32.whl", hash = "sha256:23f2f16958b16152b43a39a5ecf4705757ddd284b3b17a77da3a62aef9c057ef"},
+ {file = "coverage-7.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:9e41c94035e5cdb362beed681b58a707e8dc29ea446ea1713d92afeded9d1ddd"},
+ {file = "coverage-7.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:06d96b9b19bbe7f049c2be3c4f9e06737ec6d8ef8933c7c3a4c557ef07936e46"},
+ {file = "coverage-7.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:878243e1206828908a6b4a9ca7b1aa8bee9eb129bf7186fc381d2646f4524ce9"},
+ {file = "coverage-7.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:482df956b055d3009d10fce81af6ffab28215d7ed6ad4a15e5c8e67cb7c5251c"},
+ {file = "coverage-7.5.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a35c97af60a5492e9e89f8b7153fe24eadfd61cb3a2fb600df1a25b5dab34b7e"},
+ {file = "coverage-7.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb4c7859a3f757a116521d4d3a8a82befad56ea1bdacd17d6aafd113b0071e"},
+ {file = "coverage-7.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1046aab24c48c694f0793f669ac49ea68acde6a0798ac5388abe0a5615b5ec8"},
+ {file = "coverage-7.5.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:448ec61ea9ea7916d5579939362509145caaecf03161f6f13e366aebb692a631"},
+ {file = "coverage-7.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4a00bd5ba8f1a4114720bef283cf31583d6cb1c510ce890a6da6c4268f0070b7"},
+ {file = "coverage-7.5.2-cp39-cp39-win32.whl", hash = "sha256:9f805481d5eff2a96bac4da1570ef662bf970f9a16580dc2c169c8c3183fa02b"},
+ {file = "coverage-7.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:2c79f058e7bec26b5295d53b8c39ecb623448c74ccc8378631f5cb5c16a7e02c"},
+ {file = "coverage-7.5.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:40dbb8e7727560fe8ab65efcddfec1ae25f30ef02e2f2e5d78cfb52a66781ec5"},
+ {file = "coverage-7.5.2.tar.gz", hash = "sha256:13017a63b0e499c59b5ba94a8542fb62864ba3016127d1e4ef30d354fc2b00e9"},
]
[package.dependencies]
@@ -1590,17 +1605,6 @@ files = [
[package.extras]
graph = ["objgraph (>=1.7.2)"]
-[[package]]
-name = "dirtyjson"
-version = "1.0.8"
-description = "JSON decoder for Python that can extract data from the muck"
-optional = false
-python-versions = "*"
-files = [
- {file = "dirtyjson-1.0.8-py3-none-any.whl", hash = "sha256:125e27248435a58acace26d5c2c4c11a1c0de0a9c5124c5a94ba78e517d74f53"},
- {file = "dirtyjson-1.0.8.tar.gz", hash = "sha256:90ca4a18f3ff30ce849d100dcf4a003953c79d3a2348ef056f1d9c22231a25fd"},
-]
-
[[package]]
name = "diskcache"
version = "5.6.3"
@@ -1821,13 +1825,13 @@ develop = ["aiohttp", "furo", "httpx", "mock", "opentelemetry-api", "opentelemet
[[package]]
name = "elasticsearch"
-version = "8.13.1"
+version = "8.13.2"
description = "Python client for Elasticsearch"
optional = false
python-versions = ">=3.7"
files = [
- {file = "elasticsearch-8.13.1-py3-none-any.whl", hash = "sha256:ba2b3f8b30a7a81beae690f1cede52fbcfd29baf3ef5bb028d4fa86972feebd8"},
- {file = "elasticsearch-8.13.1.tar.gz", hash = "sha256:1594d2d1293672db62525bc4688d86cdaf118db0f901808db28dc90ad19b81e1"},
+ {file = "elasticsearch-8.13.2-py3-none-any.whl", hash = "sha256:7412ceae9c0e437a72854ab3123aa1f37110d1635cc645366988b8c0fee98598"},
+ {file = "elasticsearch-8.13.2.tar.gz", hash = "sha256:d51c93431a459b2b7c6c919b6e92a2adc8ac712758de9aeeb16cd4997fc148ad"},
]
[package.dependencies]
@@ -1836,9 +1840,24 @@ elastic-transport = ">=8.13,<9"
[package.extras]
async = ["aiohttp (>=3,<4)"]
orjson = ["orjson (>=3)"]
-requests = ["requests (>=2.4.0,<3.0.0)"]
+requests = ["requests (>=2.4.0,!=2.32.2,<3.0.0)"]
vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"]
+[[package]]
+name = "email-validator"
+version = "2.1.1"
+description = "A robust email address syntax and deliverability validation library."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"},
+ {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"},
+]
+
+[package.dependencies]
+dnspython = ">=2.0.0"
+idna = ">=2.0.0"
+
[[package]]
name = "emoji"
version = "2.12.1"
@@ -1949,23 +1968,48 @@ files = [
[[package]]
name = "fastapi"
-version = "0.110.3"
+version = "0.111.0"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
files = [
- {file = "fastapi-0.110.3-py3-none-any.whl", hash = "sha256:fd7600612f755e4050beb74001310b5a7e1796d149c2ee363124abdfa0289d32"},
- {file = "fastapi-0.110.3.tar.gz", hash = "sha256:555700b0159379e94fdbfc6bb66a0f1c43f4cf7060f25239af3d84b63a656626"},
+ {file = "fastapi-0.111.0-py3-none-any.whl", hash = "sha256:97ecbf994be0bcbdadedf88c3150252bed7b2087075ac99735403b1b76cc8fc0"},
+ {file = "fastapi-0.111.0.tar.gz", hash = "sha256:b9db9dd147c91cb8b769f7183535773d8741dd46f9dc6676cd82eab510228cd7"},
]
[package.dependencies]
+email_validator = ">=2.0.0"
+fastapi-cli = ">=0.0.2"
+httpx = ">=0.23.0"
+jinja2 = ">=2.11.2"
+orjson = ">=3.2.1"
pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
+python-multipart = ">=0.0.7"
starlette = ">=0.37.2,<0.38.0"
typing-extensions = ">=4.8.0"
+ujson = ">=4.0.1,<4.0.2 || >4.0.2,<4.1.0 || >4.1.0,<4.2.0 || >4.2.0,<4.3.0 || >4.3.0,<5.0.0 || >5.0.0,<5.1.0 || >5.1.0"
+uvicorn = {version = ">=0.12.0", extras = ["standard"]}
[package.extras]
all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
+[[package]]
+name = "fastapi-cli"
+version = "0.0.4"
+description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 🚀"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "fastapi_cli-0.0.4-py3-none-any.whl", hash = "sha256:a2552f3a7ae64058cdbb530be6fa6dbfc975dc165e4fa66d224c3d396e25e809"},
+ {file = "fastapi_cli-0.0.4.tar.gz", hash = "sha256:e2e9ffaffc1f7767f488d6da34b6f5a377751c996f397902eb6abb99a67bde32"},
+]
+
+[package.dependencies]
+typer = ">=0.12.3"
+
+[package.extras]
+standard = ["fastapi", "uvicorn[standard] (>=0.15.0)"]
+
[[package]]
name = "fastavro"
version = "1.9.4"
@@ -3977,22 +4021,20 @@ adal = ["adal (>=1.0.2)"]
[[package]]
name = "langchain"
-version = "0.1.20"
+version = "0.2.1"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain-0.1.20-py3-none-any.whl", hash = "sha256:09991999fbd6c3421a12db3c7d1f52d55601fc41d9b2a3ef51aab2e0e9c38da9"},
- {file = "langchain-0.1.20.tar.gz", hash = "sha256:f35c95eed8c8375e02dce95a34f2fd4856a4c98269d6dc34547a23dba5beab7e"},
+ {file = "langchain-0.2.1-py3-none-any.whl", hash = "sha256:3e13bf97c5717bce2c281f5117e8778823e8ccf62d949e73d3869448962b1c97"},
+ {file = "langchain-0.2.1.tar.gz", hash = "sha256:5758a315e1ac92eb26dafec5ad0fafa03cafa686aba197d5bb0b1dd28cc03ebe"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
-dataclasses-json = ">=0.5.7,<0.7"
-langchain-community = ">=0.0.38,<0.1"
-langchain-core = ">=0.1.52,<0.2.0"
-langchain-text-splitters = ">=0.0.1,<0.1"
+langchain-core = ">=0.2.0,<0.3.0"
+langchain-text-splitters = ">=0.2.0,<0.3.0"
langsmith = ">=0.1.17,<0.2.0"
numpy = ">=1,<2"
pydantic = ">=1,<3"
@@ -4008,10 +4050,10 @@ cli = ["typer (>=0.9.0,<0.10.0)"]
cohere = ["cohere (>=4,<6)"]
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
embeddings = ["sentence-transformers (>=2,<3)"]
-extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata 
(>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
+extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.1,<0.2)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata 
(>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
javascript = ["esprima (>=4.0.1,<5.0.0)"]
llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
-openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"]
+openai = ["openai (<2)", "tiktoken (>=0.7,<1.0)"]
qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
@@ -4033,18 +4075,35 @@ langchain-core = ">=0.1.43,<0.3"
[[package]]
name = "langchain-astradb"
-version = "0.1.0"
+version = "0.3.2"
description = "An integration package connecting Astra DB and LangChain"
optional = false
-python-versions = ">=3.8.1,<4.0"
+python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_astradb-0.1.0-py3-none-any.whl", hash = "sha256:c6686089da343fce8c31e36c9162323e88888300b09d56b72347a19449d7361f"},
- {file = "langchain_astradb-0.1.0.tar.gz", hash = "sha256:c8a3426c9daa2beeec2dc7a718186b0b9c388082e9543e0bc07363712cc3b947"},
+ {file = "langchain_astradb-0.3.2-py3-none-any.whl", hash = "sha256:15afc5c0105e863e8f57bf8686490c00be47ed05e47d3263ad1577f2031c0dd5"},
+ {file = "langchain_astradb-0.3.2.tar.gz", hash = "sha256:4316f2c59402779a347a811e1b5470a0570348cb89baac17472d860b63188122"},
]
[package.dependencies]
-astrapy = ">=0.7.7,<0.8.0"
-langchain-core = ">=0.1.31,<0.2.0"
+astrapy = ">=1,<2"
+langchain-core = ">=0.1.31,<0.3"
+numpy = ">=1,<2"
+
+[[package]]
+name = "langchain-chroma"
+version = "0.1.1"
+description = "An integration package connecting Chroma and LangChain"
+optional = false
+python-versions = "<3.13,>=3.8.1"
+files = [
+ {file = "langchain_chroma-0.1.1-py3-none-any.whl", hash = "sha256:7346ba749e5c5735e2a659bc5e3bb2901177bd08448d61682db5a7f882e27b87"},
+ {file = "langchain_chroma-0.1.1.tar.gz", hash = "sha256:fb17c0cc591a425179958ca8cdb25d6cc9e43f4954a1ad4f3fe9cc2d306c455a"},
+]
+
+[package.dependencies]
+chromadb = ">=0.4.0,<0.6.0"
+fastapi = ">=0.95.2,<1"
+langchain-core = ">=0.1.40,<0.3"
numpy = ">=1,<2"
[[package]]
@@ -4064,19 +4123,20 @@ langchain-core = ">=0.1.42,<0.3"
[[package]]
name = "langchain-community"
-version = "0.0.38"
+version = "0.2.1"
description = "Community contributed LangChain integrations."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_community-0.0.38-py3-none-any.whl", hash = "sha256:ecb48660a70a08c90229be46b0cc5f6bc9f38f2833ee44c57dfab9bf3a2c121a"},
- {file = "langchain_community-0.0.38.tar.gz", hash = "sha256:127fc4b75bc67b62fe827c66c02e715a730fef8fe69bd2023d466bab06b5810d"},
+ {file = "langchain_community-0.2.1-py3-none-any.whl", hash = "sha256:b834e2c5ded6903b839fcaf566eee90a0ffae53405a0f7748202725e701d39cd"},
+ {file = "langchain_community-0.2.1.tar.gz", hash = "sha256:079942e8f15da975769ccaae19042b7bba5481c42020bbbd7d8cad73a9393261"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
dataclasses-json = ">=0.5.7,<0.7"
-langchain-core = ">=0.1.52,<0.2.0"
+langchain = ">=0.2.0,<0.3.0"
+langchain-core = ">=0.2.0,<0.3.0"
langsmith = ">=0.1.0,<0.2.0"
numpy = ">=1,<2"
PyYAML = ">=5.3"
@@ -4086,17 +4146,17 @@ tenacity = ">=8.1.0,<9.0.0"
[package.extras]
cli = ["typer (>=0.9.0,<0.10.0)"]
-extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 
(>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
+extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpathlib (>=0.18,<0.19)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark 
(>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
[[package]]
name = "langchain-core"
-version = "0.1.52"
+version = "0.2.1"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_core-0.1.52-py3-none-any.whl", hash = "sha256:62566749c92e8a1181c255c788548dc16dbc319d896cd6b9c95dc17af9b2a6db"},
- {file = "langchain_core-0.1.52.tar.gz", hash = "sha256:084c3fc452f5a6966c28ab3ec5dbc8b8d26fc3f63378073928f4e29d90b6393f"},
+ {file = "langchain_core-0.2.1-py3-none-any.whl", hash = "sha256:3521e1e573988c47399fca9739270c5d34f8ecec147253ad829eb9ff288f76d5"},
+ {file = "langchain_core-0.2.1.tar.gz", hash = "sha256:49383126168d934559a543ce812c485048d9e6ac9b6798fbf3d4a72b6bba5b0c"},
]
[package.dependencies]
@@ -4112,36 +4172,36 @@ extended-testing = ["jinja2 (>=3,<4)"]
[[package]]
name = "langchain-experimental"
-version = "0.0.58"
+version = "0.0.59"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_experimental-0.0.58-py3-none-any.whl", hash = "sha256:106d3bc7df3dd20687378db7534c2fc21e2589201d43de42f832a1e3913dd55b"},
- {file = "langchain_experimental-0.0.58.tar.gz", hash = "sha256:8ef10ff6b39f44ef468f8f21beb3749957d2262ec64d05db2719934936ca0285"},
+ {file = "langchain_experimental-0.0.59-py3-none-any.whl", hash = "sha256:d6ceb586c15ad35fc619542e86d01f0984a94985324a78a9ed8cd87615ff265d"},
+ {file = "langchain_experimental-0.0.59.tar.gz", hash = "sha256:3a93f5c328f6ee1cd4f9dd8792c535df2d5638cff0d778ee25546804b5282fda"},
]
[package.dependencies]
-langchain = ">=0.1.17,<0.2.0"
-langchain-core = ">=0.1.52,<0.2.0"
+langchain-community = ">=0.2,<0.3"
+langchain-core = ">=0.2,<0.3"
[package.extras]
extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "pandas (>=2.0.1,<3.0.0)", "presidio-analyzer (>=2.2.352,<3.0.0)", "presidio-anonymizer (>=2.2.352,<3.0.0)", "sentence-transformers (>=2,<3)", "tabulate (>=0.9.0,<0.10.0)", "vowpal-wabbit-next (==0.6.0)"]
[[package]]
name = "langchain-google-genai"
-version = "1.0.4"
+version = "1.0.5"
description = "An integration package connecting Google's genai package and LangChain"
optional = false
python-versions = "<4.0,>=3.9"
files = [
- {file = "langchain_google_genai-1.0.4-py3-none-any.whl", hash = "sha256:e567cc401f8d629fce489ee031d258da7fa4b7da0abb8ed926d6990c650b659e"},
- {file = "langchain_google_genai-1.0.4.tar.gz", hash = "sha256:b6beccfe7504ce9f8778a8df23dc49239fd91cf076a55d61759a09fc1373ca26"},
+ {file = "langchain_google_genai-1.0.5-py3-none-any.whl", hash = "sha256:06b1af072e14fe2d4f9257be4bf883ccd544896094f847c2b1ab09b123ba3b9e"},
+ {file = "langchain_google_genai-1.0.5.tar.gz", hash = "sha256:5b515192755fd396a1b61b33d1b08c77fb9b53394cc25954f9d7e9a0f615de9b"},
]
[package.dependencies]
google-generativeai = ">=0.5.2,<0.6.0"
-langchain-core = ">=0.1.45,<0.3"
+langchain-core = ">=0.2.0,<0.3"
[package.extras]
images = ["pillow (>=10.1.0,<11.0.0)"]
@@ -4231,30 +4291,30 @@ pinecone-client = ">=3.2.2,<4.0.0"
[[package]]
name = "langchain-text-splitters"
-version = "0.0.2"
+version = "0.2.0"
description = "LangChain text splitting utilities"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_text_splitters-0.0.2-py3-none-any.whl", hash = "sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d"},
- {file = "langchain_text_splitters-0.0.2.tar.gz", hash = "sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1"},
+ {file = "langchain_text_splitters-0.2.0-py3-none-any.whl", hash = "sha256:7b4c6a45f8471630a882b321e138329b6897102a5bc62f4c12be1c0b05bb9199"},
+ {file = "langchain_text_splitters-0.2.0.tar.gz", hash = "sha256:b32ab4f7397f7d42c1fa3283fefc2547ba356bd63a68ee9092865e5ad83c82f9"},
]
[package.dependencies]
-langchain-core = ">=0.1.28,<0.3"
+langchain-core = ">=0.2.0,<0.3.0"
[package.extras]
extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"]
[[package]]
name = "langchainhub"
-version = "0.1.15"
+version = "0.1.16"
description = "The LangChain Hub API client"
optional = false
-python-versions = ">=3.8.1,<4.0"
+python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchainhub-0.1.15-py3-none-any.whl", hash = "sha256:89a0951abd1db255e91c6d545d092a598fc255aa865d1ffc3ce8f93bbeae60e7"},
- {file = "langchainhub-0.1.15.tar.gz", hash = "sha256:fa3ff81a31946860f84c119f1e2f6b7c7707e2bd7ed2394a7313b286d59f3bda"},
+ {file = "langchainhub-0.1.16-py3-none-any.whl", hash = "sha256:a4379a1879cc6b441b8d02cc65e28a54f160fba61c9d1d4b0eddc3a276dff99a"},
+ {file = "langchainhub-0.1.16.tar.gz", hash = "sha256:9f11e68fddb575e70ef4b28800eedbd9eeb180ba508def04f7153ea5b246b6fc"},
]
[package.dependencies]
@@ -4263,7 +4323,7 @@ types-requests = ">=2.31.0.2,<3.0.0.0"
[[package]]
name = "langflow-base"
-version = "0.0.47"
+version = "0.0.49"
description = "A Python package with a built-in web application"
optional = false
python-versions = ">=3.10,<3.13"
@@ -4278,12 +4338,12 @@ cachetools = "^5.3.1"
cryptography = "^42.0.5"
docstring-parser = "^0.15"
duckdb = "^0.10.2"
-emoji = "^2.11.0"
-fastapi = "^0.110.1"
+emoji = "^2.12.0"
+fastapi = "^0.111.0"
gunicorn = "^22.0.0"
httpx = "*"
jq = {version = "^1.7.0", markers = "sys_platform != \"win32\""}
-langchain = "~0.1.16"
+langchain = "~0.2.0"
langchain-experimental = "*"
langchainhub = "~0.1.15"
loguru = "^0.7.1"
@@ -4294,15 +4354,16 @@ pandas = "2.2.0"
passlib = "^1.7.4"
pillow = "^10.2.0"
platformdirs = "^4.2.0"
-pydantic = "^2.5.0"
-pydantic-settings = "^2.1.0"
-pypdf = "^4.1.0"
+pydantic = "^2.7.0"
+pydantic-settings = "^2.2.0"
+pypdf = "^4.2.0"
+pyperclip = "^1.8.2"
python-docx = "^1.1.0"
python-jose = "^3.3.0"
python-multipart = "^0.0.7"
python-socketio = "^5.11.0"
rich = "^13.7.0"
-sqlmodel = "^0.0.16"
+sqlmodel = "^0.0.18"
typer = "^0.12.0"
uvicorn = "^0.29.0"
websockets = "*"
@@ -4342,13 +4403,13 @@ openai = ["openai (>=0.27.8)"]
[[package]]
name = "langsmith"
-version = "0.1.62"
+version = "0.1.63"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langsmith-0.1.62-py3-none-any.whl", hash = "sha256:3a9f112643f64d736b8c875390c750fe6485804ea53aeae4edebce0afa4383a5"},
- {file = "langsmith-0.1.62.tar.gz", hash = "sha256:7ef894c14e6d4175fce88ec3bcd5a9c8cf9a456ea77e26e361f519ad082f34a8"},
+ {file = "langsmith-0.1.63-py3-none-any.whl", hash = "sha256:7810afdf5e3f3b472fc581a29371fb96cd843dde2149e048d1b9610325159d1e"},
+ {file = "langsmith-0.1.63.tar.gz", hash = "sha256:a609405b52f6f54df442a142cbf19ab38662d54e532f96028b4c546434d4afdf"},
]
[package.dependencies]
@@ -4358,13 +4419,13 @@ requests = ">=2,<3"
[[package]]
name = "litellm"
-version = "1.38.0"
+version = "1.38.10"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
files = [
- {file = "litellm-1.38.0-py3-none-any.whl", hash = "sha256:bdb63f30999a664ca7361b9c9c7f0a8e3cc10678ddf252455955fd145a96eaa5"},
- {file = "litellm-1.38.0.tar.gz", hash = "sha256:1d77a8572cd9904369393fcdf24f6557e6b01ff9b04d346c2d69c04d23485716"},
+ {file = "litellm-1.38.10-py3-none-any.whl", hash = "sha256:4d33465eacde566832b9d7aa7677476e61aa7ba4ec26631fb1c8411c87219ed1"},
+ {file = "litellm-1.38.10.tar.gz", hash = "sha256:1a0b3088fe4b072f367343a7d7d25e4c5f9990975d9ee7dbf21f3b25ff046bb0"},
]
[package.dependencies]
@@ -4384,12 +4445,12 @@ proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "
[[package]]
name = "llama-cpp-python"
-version = "0.2.75"
+version = "0.2.76"
description = "Python bindings for the llama.cpp library"
optional = true
python-versions = ">=3.8"
files = [
- {file = "llama_cpp_python-0.2.75.tar.gz", hash = "sha256:aee9383935c42e812ee84265b1dafe5f0e3a20ee47216529b64a2ed6caaaed44"},
+ {file = "llama_cpp_python-0.2.76.tar.gz", hash = "sha256:a4e2ab6b74dc87f565a21e4f1617c030f92d5b341375d7173876d238613a50ab"},
]
[package.dependencies]
@@ -4404,292 +4465,6 @@ dev = ["black (>=23.3.0)", "httpx (>=0.24.1)", "mkdocs (>=1.4.3)", "mkdocs-mater
server = ["PyYAML (>=5.1)", "fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", "sse-starlette (>=1.6.1)", "starlette-context (>=0.3.6,<0.4)", "uvicorn (>=0.22.0)"]
test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"]
-[[package]]
-name = "llama-index"
-version = "0.10.38"
-description = "Interface between LLMs and your data"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index-0.10.38-py3-none-any.whl", hash = "sha256:5d521b0ea7111679521292432960d3b9fb53c98d55414bd42d753bc6271d234d"},
- {file = "llama_index-0.10.38.tar.gz", hash = "sha256:5281cfa8b6e7f0f5f12897c00adcd790f7b51c130037f3561fd5630fca37bfb3"},
-]
-
-[package.dependencies]
-llama-index-agent-openai = ">=0.1.4,<0.3.0"
-llama-index-cli = ">=0.1.2,<0.2.0"
-llama-index-core = ">=0.10.38,<0.11.0"
-llama-index-embeddings-openai = ">=0.1.5,<0.2.0"
-llama-index-indices-managed-llama-cloud = ">=0.1.2,<0.2.0"
-llama-index-legacy = ">=0.9.48,<0.10.0"
-llama-index-llms-openai = ">=0.1.13,<0.2.0"
-llama-index-multi-modal-llms-openai = ">=0.1.3,<0.2.0"
-llama-index-program-openai = ">=0.1.3,<0.2.0"
-llama-index-question-gen-openai = ">=0.1.2,<0.2.0"
-llama-index-readers-file = ">=0.1.4,<0.2.0"
-llama-index-readers-llama-parse = ">=0.1.2,<0.2.0"
-
-[[package]]
-name = "llama-index-agent-openai"
-version = "0.2.5"
-description = "llama-index agent openai integration"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_agent_openai-0.2.5-py3-none-any.whl", hash = "sha256:67536bb104b24734f79324207034d948a2ca7e4cc20dd60cf05d6eeb4b12a586"},
- {file = "llama_index_agent_openai-0.2.5.tar.gz", hash = "sha256:45f4cc670d037a8a67f541d3a4d095f7f61caff6ed2c25702441eb1116d4b495"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.35,<0.11.0"
-llama-index-llms-openai = ">=0.1.5,<0.2.0"
-openai = ">=1.14.0"
-
-[[package]]
-name = "llama-index-cli"
-version = "0.1.12"
-description = "llama-index cli"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_cli-0.1.12-py3-none-any.whl", hash = "sha256:d80d546786f02d3f16f6183b8e86b22b8b5c33a1500923659f2ccbff8d5df634"},
- {file = "llama_index_cli-0.1.12.tar.gz", hash = "sha256:3cf1f706c3c69c6b1aab07fca7faad3959db1709808efd50491b669d38b0b580"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.11.post1,<0.11.0"
-llama-index-embeddings-openai = ">=0.1.1,<0.2.0"
-llama-index-llms-openai = ">=0.1.1,<0.2.0"
-
-[[package]]
-name = "llama-index-core"
-version = "0.10.38.post2"
-description = "Interface between LLMs and your data"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_core-0.10.38.post2-py3-none-any.whl", hash = "sha256:b4b55449bac458d339e84d8d26f322b4dc9f36d3682ebb41fccf5594c295620f"},
- {file = "llama_index_core-0.10.38.post2.tar.gz", hash = "sha256:9eff6e16e9045deca9cb58bcf2a4b9ba39d0da12d7493e6aebaa5badd3b3ebb5"},
-]
-
-[package.dependencies]
-aiohttp = ">=3.8.6,<4.0.0"
-dataclasses-json = "*"
-deprecated = ">=1.2.9.3"
-dirtyjson = ">=1.0.8,<2.0.0"
-fsspec = ">=2023.5.0"
-httpx = "*"
-llamaindex-py-client = ">=0.1.18,<0.2.0"
-nest-asyncio = ">=1.5.8,<2.0.0"
-networkx = ">=3.0"
-nltk = ">=3.8.1,<4.0.0"
-numpy = "*"
-openai = ">=1.1.0"
-pandas = "*"
-pillow = ">=9.0.0"
-PyYAML = ">=6.0.1"
-requests = ">=2.31.0"
-SQLAlchemy = {version = ">=1.4.49", extras = ["asyncio"]}
-tenacity = ">=8.2.0,<9.0.0"
-tiktoken = ">=0.3.3"
-tqdm = ">=4.66.1,<5.0.0"
-typing-extensions = ">=4.5.0"
-typing-inspect = ">=0.8.0"
-wrapt = "*"
-
-[[package]]
-name = "llama-index-embeddings-openai"
-version = "0.1.10"
-description = "llama-index embeddings openai integration"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_embeddings_openai-0.1.10-py3-none-any.whl", hash = "sha256:c3cfa83b537ded34d035fc172a945dd444c87fb58a89b02dfbf785b675f9f681"},
- {file = "llama_index_embeddings_openai-0.1.10.tar.gz", hash = "sha256:1bc1fc9b46773a12870c5d3097d3735d7ca33805f12462a8e35ae8a6e5ce1cf6"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.1,<0.11.0"
-
-[[package]]
-name = "llama-index-indices-managed-llama-cloud"
-version = "0.1.6"
-description = "llama-index indices llama-cloud integration"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_indices_managed_llama_cloud-0.1.6-py3-none-any.whl", hash = "sha256:cba33e1a3677b2a2ae7f239119acbf6dc3818f105edc92315729842b56fbc949"},
- {file = "llama_index_indices_managed_llama_cloud-0.1.6.tar.gz", hash = "sha256:74b3b0e9ebf9d348d3054f9fc0c657031acceb9351c31116ad8d5a7ae4729f5c"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.0,<0.11.0"
-llamaindex-py-client = ">=0.1.19,<0.2.0"
-
-[[package]]
-name = "llama-index-legacy"
-version = "0.9.48"
-description = "Interface between LLMs and your data"
-optional = false
-python-versions = ">=3.8.1,<4.0"
-files = [
- {file = "llama_index_legacy-0.9.48-py3-none-any.whl", hash = "sha256:714ada95beac179b4acefa4d2deff74bb7b2f22b0f699ac247d4cb67738d16d4"},
- {file = "llama_index_legacy-0.9.48.tar.gz", hash = "sha256:82ddc4691edbf49533d65582c249ba22c03fe96fbd3e92f7758dccef28e43834"},
-]
-
-[package.dependencies]
-aiohttp = ">=3.8.6,<4.0.0"
-dataclasses-json = "*"
-deprecated = ">=1.2.9.3"
-dirtyjson = ">=1.0.8,<2.0.0"
-fsspec = ">=2023.5.0"
-httpx = "*"
-nest-asyncio = ">=1.5.8,<2.0.0"
-networkx = ">=3.0"
-nltk = ">=3.8.1,<4.0.0"
-numpy = "*"
-openai = ">=1.1.0"
-pandas = "*"
-requests = ">=2.31.0"
-SQLAlchemy = {version = ">=1.4.49", extras = ["asyncio"]}
-tenacity = ">=8.2.0,<9.0.0"
-tiktoken = ">=0.3.3"
-typing-extensions = ">=4.5.0"
-typing-inspect = ">=0.8.0"
-
-[package.extras]
-gradientai = ["gradientai (>=1.4.0)"]
-html = ["beautifulsoup4 (>=4.12.2,<5.0.0)"]
-langchain = ["langchain (>=0.0.303)"]
-local-models = ["optimum[onnxruntime] (>=1.13.2,<2.0.0)", "sentencepiece (>=0.1.99,<0.2.0)", "transformers[torch] (>=4.33.1,<5.0.0)"]
-postgres = ["asyncpg (>=0.28.0,<0.29.0)", "pgvector (>=0.1.0,<0.2.0)", "psycopg2-binary (>=2.9.9,<3.0.0)"]
-query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "lm-format-enforcer (>=0.4.3,<0.5.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "scikit-learn", "spacy (>=3.7.1,<4.0.0)"]
-
-[[package]]
-name = "llama-index-llms-openai"
-version = "0.1.20"
-description = "llama-index llms openai integration"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_llms_openai-0.1.20-py3-none-any.whl", hash = "sha256:f27401acdf9f65bf4d866a100615dcbd81987b890ae5fa9c513d544ba6d711e7"},
- {file = "llama_index_llms_openai-0.1.20.tar.gz", hash = "sha256:0282e4e252893487afd72383b46da5b28ddcd3fb73bace1caefce8a36e9cf492"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.24,<0.11.0"
-
-[[package]]
-name = "llama-index-multi-modal-llms-openai"
-version = "0.1.6"
-description = "llama-index multi-modal-llms openai integration"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_multi_modal_llms_openai-0.1.6-py3-none-any.whl", hash = "sha256:0b6950a6cf98d16ade7d3b9dd0821ecfe457ca103819ae6c3e66cfc9634ca646"},
- {file = "llama_index_multi_modal_llms_openai-0.1.6.tar.gz", hash = "sha256:10de75a877a444af35306385faad9b9f0624391e55309970564114a080a0578c"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.1,<0.11.0"
-llama-index-llms-openai = ">=0.1.1,<0.2.0"
-
-[[package]]
-name = "llama-index-program-openai"
-version = "0.1.6"
-description = "llama-index program openai integration"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_program_openai-0.1.6-py3-none-any.whl", hash = "sha256:4660b338503537c5edca1e0dab606af6ce372b4f1b597e2833c6b602447c5d8d"},
- {file = "llama_index_program_openai-0.1.6.tar.gz", hash = "sha256:c6a4980c5ea826088b28b4dee3367edb20221e6d05eb0e05019049190131d772"},
-]
-
-[package.dependencies]
-llama-index-agent-openai = ">=0.1.1,<0.3.0"
-llama-index-core = ">=0.10.1,<0.11.0"
-llama-index-llms-openai = ">=0.1.1,<0.2.0"
-
-[[package]]
-name = "llama-index-question-gen-openai"
-version = "0.1.3"
-description = "llama-index question_gen openai integration"
-optional = false
-python-versions = ">=3.8.1,<4.0"
-files = [
- {file = "llama_index_question_gen_openai-0.1.3-py3-none-any.whl", hash = "sha256:1f83b49e8b2e665030d1ec8c54687d6985d9fa8426147b64e46628a9e489b302"},
- {file = "llama_index_question_gen_openai-0.1.3.tar.gz", hash = "sha256:4486198117a45457d2e036ae60b93af58052893cc7d78fa9b6f47dd47b81e2e1"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.1,<0.11.0"
-llama-index-llms-openai = ">=0.1.1,<0.2.0"
-llama-index-program-openai = ">=0.1.1,<0.2.0"
-
-[[package]]
-name = "llama-index-readers-file"
-version = "0.1.22"
-description = "llama-index readers file integration"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_readers_file-0.1.22-py3-none-any.whl", hash = "sha256:a8d4a69a9ea659c14ebb22ca9a5560b9c7ec6f501e7f68f6c52f591374165376"},
- {file = "llama_index_readers_file-0.1.22.tar.gz", hash = "sha256:37de54ad0cfbdc607c195532b9a292417a4714f57773570b87027b8dc381f0e2"},
-]
-
-[package.dependencies]
-beautifulsoup4 = ">=4.12.3,<5.0.0"
-llama-index-core = ">=0.10.1,<0.11.0"
-pypdf = ">=4.0.1,<5.0.0"
-striprtf = ">=0.0.26,<0.0.27"
-
-[package.extras]
-pymupdf = ["pymupdf (>=1.23.21,<2.0.0)"]
-
-[[package]]
-name = "llama-index-readers-llama-parse"
-version = "0.1.4"
-description = "llama-index readers llama-parse integration"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_index_readers_llama_parse-0.1.4-py3-none-any.whl", hash = "sha256:c4914b37d12cceee56fbd185cca80f87d60acbf8ea7a73f9719610180be1fcdd"},
- {file = "llama_index_readers_llama_parse-0.1.4.tar.gz", hash = "sha256:78608b193c818894aefeee0aa303f02b7f80f2e4caf13866c2fd3b0b1023e2c0"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.7,<0.11.0"
-llama-parse = ">=0.4.0,<0.5.0"
-
-[[package]]
-name = "llama-parse"
-version = "0.4.3"
-description = "Parse files into RAG-Optimized formats."
-optional = false
-python-versions = "<4.0,>=3.8.1"
-files = [
- {file = "llama_parse-0.4.3-py3-none-any.whl", hash = "sha256:c48c53a3080daeede293df620dddb1f381e084c31ee2dd44dce3f8615df723e8"},
- {file = "llama_parse-0.4.3.tar.gz", hash = "sha256:01836147b5238873b24a7dd41c5ab942b01b09b92d75570f30cf2861c084a0eb"},
-]
-
-[package.dependencies]
-llama-index-core = ">=0.10.29"
-
-[[package]]
-name = "llamaindex-py-client"
-version = "0.1.19"
-description = ""
-optional = false
-python-versions = "<4,>=3.8"
-files = [
- {file = "llamaindex_py_client-0.1.19-py3-none-any.whl", hash = "sha256:fd9416fd78b97209bf323bc3c7fab314499778563e7274f10853ad560563d10e"},
- {file = "llamaindex_py_client-0.1.19.tar.gz", hash = "sha256:73f74792bb8c092bae6dc626627a09ac13a099fa8d10f8fcc83e17a2b332cca7"},
-]
-
-[package.dependencies]
-httpx = ">=0.20.0"
-pydantic = ">=1.10"
-
[[package]]
name = "locust"
version = "2.28.0"
@@ -4911,6 +4686,21 @@ babel = ["Babel"]
lingua = ["lingua"]
testing = ["pytest"]
+[[package]]
+name = "markdown"
+version = "3.6"
+description = "Python implementation of John Gruber's Markdown."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"},
+ {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"},
+]
+
+[package.extras]
+docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
+testing = ["coverage", "pyyaml"]
+
[[package]]
name = "markdown-it-py"
version = "3.0.0"
@@ -5494,31 +5284,6 @@ doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-t
extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"]
test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
-[[package]]
-name = "nltk"
-version = "3.8.1"
-description = "Natural Language Toolkit"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"},
- {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"},
-]
-
-[package.dependencies]
-click = "*"
-joblib = "*"
-regex = ">=2021.8.3"
-tqdm = "*"
-
-[package.extras]
-all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"]
-corenlp = ["requests"]
-machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"]
-plot = ["matplotlib"]
-tgrep = ["pyparsing"]
-twitter = ["twython"]
-
[[package]]
name = "nodeenv"
version = "1.8.0"
@@ -5821,13 +5586,13 @@ sympy = "*"
[[package]]
name = "openai"
-version = "1.30.2"
+version = "1.30.3"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
- {file = "openai-1.30.2-py3-none-any.whl", hash = "sha256:44316818fbff3845278e862a655c4c041e93d907b04eff64629c2835f29bd58e"},
- {file = "openai-1.30.2.tar.gz", hash = "sha256:f86780f40505de60fa389993d9b7f5564f20acfbe5efcabd5c853a12453af2b0"},
+ {file = "openai-1.30.3-py3-none-any.whl", hash = "sha256:f88119c8a848998be533c71ab8aa832446fa72b7ddbc70917c3f5886dc132051"},
+ {file = "openai-1.30.3.tar.gz", hash = "sha256:8e1bcdca2b96fe3636ab522fa153d88efde1b702d12ec32f1c73e9553ff93f45"},
]
[package.dependencies]
@@ -6756,53 +6521,6 @@ files = [
{file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
]
-[[package]]
-name = "pulsar-client"
-version = "3.5.0"
-description = "Apache Pulsar Python client library"
-optional = false
-python-versions = "*"
-files = [
- {file = "pulsar_client-3.5.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c18552edb2f785de85280fe624bc507467152bff810fc81d7660fa2dfa861f38"},
- {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18d438e456c146f01be41ef146f649dedc8f7bc714d9eaef94cff2e34099812b"},
- {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18a26a0719841103c7a89eb1492c4a8fedf89adaa386375baecbb4fa2707e88f"},
- {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab0e1605dc5f44a126163fd06cd0a768494ad05123f6e0de89a2c71d6e2d2319"},
- {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdef720891b97656fdce3bf5913ea7729b2156b84ba64314f432c1e72c6117fa"},
- {file = "pulsar_client-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:a42544e38773191fe550644a90e8050579476bb2dcf17ac69a4aed62a6cb70e7"},
- {file = "pulsar_client-3.5.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:fd94432ea5d398ea78f8f2e09a217ec5058d26330c137a22690478c031e116da"},
- {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6252ae462e07ece4071213fdd9c76eab82ca522a749f2dc678037d4cbacd40b"},
- {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03b4d440b2d74323784328b082872ee2f206c440b5d224d7941eb3c083ec06c6"},
- {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f60af840b8d64a2fac5a0c1ce6ae0ddffec5f42267c6ded2c5e74bad8345f2a1"},
- {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2277a447c3b7f6571cb1eb9fc5c25da3fdd43d0b2fb91cf52054adfadc7d6842"},
- {file = "pulsar_client-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:f20f3e9dd50db2a37059abccad42078b7a4754b8bc1d3ae6502e71c1ad2209f0"},
- {file = "pulsar_client-3.5.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:d61f663d85308e12f44033ba95af88730f581a7e8da44f7a5c080a3aaea4878d"},
- {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1ba0be25b6f747bcb28102b7d906ec1de48dc9f1a2d9eacdcc6f44ab2c9e17"},
- {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a181e3e60ac39df72ccb3c415d7aeac61ad0286497a6e02739a560d5af28393a"},
- {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3c72895ff7f51347e4f78b0375b2213fa70dd4790bbb78177b4002846f1fd290"},
- {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:547dba1b185a17eba915e51d0a3aca27c80747b6187e5cd7a71a3ca33921decc"},
- {file = "pulsar_client-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:443b786eed96bc86d2297a6a42e79f39d1abf217ec603e0bd303f3488c0234af"},
- {file = "pulsar_client-3.5.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:15b58f5d759dd6166db8a2d90ed05a38063b05cda76c36d190d86ef5c9249397"},
- {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af34bfe813dddf772a8a298117fa0a036ee963595d8bc8f00d969a0329ae6ed9"},
- {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0fec1dd74e1367d3742ce16679c1807994df60f5e666f440cf39323938fad"},
- {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbcd26ef9c03f96fb9cd91baec3bbd3c4b997834eb3556670d31f41cc25b5f64"},
- {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:afea1d0b6e793fd56e56463145751ff3aa79fdcd5b26e90d0da802a1bbabe07e"},
- {file = "pulsar_client-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:da1ab2fb1bef64b966e9403a0a186ebc90368d99e054ce2cae5b1128478f4ef4"},
- {file = "pulsar_client-3.5.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:9ad5dcc0eb8d2a7c0fb8e1fa146a0c6d4bdaf934f1169080b2c64b2f0573e086"},
- {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5870c6805b1a57962ed908d1173e97e13470415998393925c86a43694420389"},
- {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29cb5fedb969895b78301dc00a979133e69940812b8332e4de948bb0ad3db7cb"},
- {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e53c74bfa59b20c66adea95023169060f5048dd8d843e6ef9cd3b8ee2d23e93b"},
- {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99dbadb13967f1add57010971ed36b5a77d24afcdaea01960d0e55e56cf4ba6f"},
- {file = "pulsar_client-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:058887661d438796f42307dcc8054c84dea88a37683dae36498b95d7e1c39b37"},
-]
-
-[package.dependencies]
-certifi = "*"
-
-[package.extras]
-all = ["apache-bookkeeper-client (>=4.16.1)", "fastavro (>=1.9.2)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"]
-avro = ["fastavro (>=1.9.2)"]
-functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"]
-
[[package]]
name = "pure-eval"
version = "0.2.2"
@@ -7264,6 +6982,16 @@ docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"]
full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"]
image = ["Pillow (>=8.0.0)"]
+[[package]]
+name = "pyperclip"
+version = "1.8.2"
+description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pyperclip-1.8.2.tar.gz", hash = "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57"},
+]
+
[[package]]
name = "pypika"
version = "0.48.9"
@@ -7338,13 +7066,13 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
name = "pytest-cov"
-version = "4.1.0"
+version = "5.0.0"
description = "Pytest plugin for measuring coverage."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
- {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
+ {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
]
[package.dependencies]
@@ -7352,7 +7080,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
-testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-instafail"
@@ -8036,28 +7764,28 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
-version = "0.3.7"
+version = "0.4.5"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"},
- {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"},
- {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"},
- {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"},
- {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"},
- {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"},
- {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"},
- {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"},
- {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"},
- {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"},
- {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"},
+ {file = "ruff-0.4.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8f58e615dec58b1a6b291769b559e12fdffb53cc4187160a2fc83250eaf54e96"},
+ {file = "ruff-0.4.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:84dd157474e16e3a82745d2afa1016c17d27cb5d52b12e3d45d418bcc6d49264"},
+ {file = "ruff-0.4.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f483ad9d50b00e7fd577f6d0305aa18494c6af139bce7319c68a17180087f4"},
+ {file = "ruff-0.4.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:63fde3bf6f3ad4e990357af1d30e8ba2730860a954ea9282c95fc0846f5f64af"},
+ {file = "ruff-0.4.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e3ba4620dee27f76bbcad97067766026c918ba0f2d035c2fc25cbdd04d9c97"},
+ {file = "ruff-0.4.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:441dab55c568e38d02bbda68a926a3d0b54f5510095c9de7f95e47a39e0168aa"},
+ {file = "ruff-0.4.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1169e47e9c4136c997f08f9857ae889d614c5035d87d38fda9b44b4338909cdf"},
+ {file = "ruff-0.4.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:755ac9ac2598a941512fc36a9070a13c88d72ff874a9781493eb237ab02d75df"},
+ {file = "ruff-0.4.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4b02a65985be2b34b170025a8b92449088ce61e33e69956ce4d316c0fe7cce0"},
+ {file = "ruff-0.4.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:75a426506a183d9201e7e5664de3f6b414ad3850d7625764106f7b6d0486f0a1"},
+ {file = "ruff-0.4.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6e1b139b45e2911419044237d90b60e472f57285950e1492c757dfc88259bb06"},
+ {file = "ruff-0.4.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a6f29a8221d2e3d85ff0c7b4371c0e37b39c87732c969b4d90f3dad2e721c5b1"},
+ {file = "ruff-0.4.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d6ef817124d72b54cc923f3444828ba24fa45c3164bc9e8f1813db2f3d3a8a11"},
+ {file = "ruff-0.4.5-py3-none-win32.whl", hash = "sha256:aed8166c18b1a169a5d3ec28a49b43340949e400665555b51ee06f22813ef062"},
+ {file = "ruff-0.4.5-py3-none-win_amd64.whl", hash = "sha256:b0b03c619d2b4350b4a27e34fd2ac64d0dabe1afbf43de57d0f9d8a05ecffa45"},
+ {file = "ruff-0.4.5-py3-none-win_arm64.whl", hash = "sha256:9d15de3425f53161b3f5a5658d4522e4eee5ea002bf2ac7aa380743dd9ad5fba"},
+ {file = "ruff-0.4.5.tar.gz", hash = "sha256:286eabd47e7d4d521d199cab84deca135557e6d1e0f0d01c29e757c3cb151b54"},
]
[[package]]
@@ -8502,7 +8230,7 @@ files = [
]
[package.dependencies]
-greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\""}
+greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
typing-extensions = ">=4.6.0"
[package.extras]
@@ -8532,13 +8260,13 @@ sqlcipher = ["sqlcipher3_binary"]
[[package]]
name = "sqlmodel"
-version = "0.0.16"
+version = "0.0.18"
description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness."
optional = false
-python-versions = ">=3.7,<4.0"
+python-versions = ">=3.7"
files = [
- {file = "sqlmodel-0.0.16-py3-none-any.whl", hash = "sha256:b972f5d319580d6c37ecc417881f6ec4d1ad3ed3583d0ac0ed43234a28bf605a"},
- {file = "sqlmodel-0.0.16.tar.gz", hash = "sha256:966656f18a8e9a2d159eb215b07fb0cf5222acfae3362707ca611848a8a06bd1"},
+ {file = "sqlmodel-0.0.18-py3-none-any.whl", hash = "sha256:d70fdf8fe595e30a918660cf4537b9c5fc2fffdbfcba851a0135de73c3ebcbb7"},
+ {file = "sqlmodel-0.0.18.tar.gz", hash = "sha256:2e520efe03810ef2c268a1004cfc5ef8f8a936312232f38d6c8e62c11af2cac3"},
]
[package.dependencies]
@@ -8613,17 +8341,6 @@ docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
release = ["twine"]
test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
-[[package]]
-name = "striprtf"
-version = "0.0.26"
-description = "A simple library to convert rtf to text"
-optional = false
-python-versions = "*"
-files = [
- {file = "striprtf-0.0.26-py3-none-any.whl", hash = "sha256:8c8f9d32083cdc2e8bfb149455aa1cc5a4e0a035893bedc75db8b73becb3a1bb"},
- {file = "striprtf-0.0.26.tar.gz", hash = "sha256:fdb2bba7ac440072d1c41eab50d8d74ae88f60a8b6575c6e2c7805dc462093aa"},
-]
-
[[package]]
name = "structlog"
version = "24.1.0"
@@ -9311,24 +9028,24 @@ urllib3 = ">=2"
[[package]]
name = "types-setuptools"
-version = "70.0.0.20240523"
+version = "70.0.0.20240524"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-setuptools-70.0.0.20240523.tar.gz", hash = "sha256:268c782f9d657bb0447a97bb9d50debd3a48721bb9d1d8194548d4835798beac"},
- {file = "types_setuptools-70.0.0.20240523-py3-none-any.whl", hash = "sha256:1828c1e2bc93cdb371fd0955fa51e27c3143490fe40a650db3fca22ea44233b3"},
+ {file = "types-setuptools-70.0.0.20240524.tar.gz", hash = "sha256:e31fee7b9d15ef53980526579ac6089b3ae51a005a281acf97178e90ac71aff6"},
+ {file = "types_setuptools-70.0.0.20240524-py3-none-any.whl", hash = "sha256:8f5379b9948682d72a9ab531fbe52932e84c4f38deda570255f9bae3edd766bc"},
]
[[package]]
name = "typing-extensions"
-version = "4.11.0"
+version = "4.12.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
- {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
+ {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"},
+ {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"},
]
[[package]]
@@ -9472,6 +9189,17 @@ h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
+[[package]]
+name = "uuid6"
+version = "2024.1.12"
+description = "New time-based UUID formats which are suited for use as a database key"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "uuid6-2024.1.12-py3-none-any.whl", hash = "sha256:8150093c8d05a331bc0535bc5ef6cf57ac6eceb2404fd319bc10caee2e02c065"},
+ {file = "uuid6-2024.1.12.tar.gz", hash = "sha256:ed0afb3a973057575f9883201baefe402787ca5e11e1d24e377190f0c43f1993"},
+]
+
[[package]]
name = "uvicorn"
version = "0.29.0"
@@ -9584,88 +9312,102 @@ platformdirs = ">=3.9.1,<5"
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+[[package]]
+name = "vulture"
+version = "2.11"
+description = "Find dead code"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "vulture-2.11-py2.py3-none-any.whl", hash = "sha256:12d745f7710ffbf6aeb8279ba9068a24d4e52e8ed333b8b044035c9d6b823aba"},
+ {file = "vulture-2.11.tar.gz", hash = "sha256:f0fbb60bce6511aad87ee0736c502456737490a82d919a44e6d92262cb35f1c2"},
+]
+
+[package.dependencies]
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+
[[package]]
name = "watchfiles"
-version = "0.21.0"
+version = "0.22.0"
description = "Simple, modern and high performance file watching and code reload in python."
optional = false
python-versions = ">=3.8"
files = [
- {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"},
- {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"},
- {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"},
- {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"},
- {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"},
- {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"},
- {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"},
- {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"},
- {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"},
- {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"},
- {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"},
- {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"},
- {file = "watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"},
- {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"},
- {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"},
- {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"},
- {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"},
- {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"},
- {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"},
- {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"},
- {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"},
- {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"},
- {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"},
- {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"},
- {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = "sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"},
- {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"},
- {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"},
- {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"},
- {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"},
- {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"},
- {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"},
- {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"},
- {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"},
- {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"},
- {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"},
- {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"},
- {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"},
- {file = "watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"},
- {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"},
- {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"},
- {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"},
- {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"},
- {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"},
- {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"},
- {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"},
- {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"},
- {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"},
- {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"},
- {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"},
- {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = "sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"},
- {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"},
- {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"},
- {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"},
- {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"},
- {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"},
- {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"},
- {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"},
- {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"},
- {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"},
- {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"},
- {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"},
- {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"},
- {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"},
- {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"},
- {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"},
- {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"},
- {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"},
- {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"},
- {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"},
- {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"},
- {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"},
- {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"},
- {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"},
- {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"},
- {file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"},
+ {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"},
+ {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"},
+ {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"},
+ {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"},
+ {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"},
+ {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"},
+ {file = "watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"},
+ {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"},
+ {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"},
+ {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"},
+ {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"},
+ {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"},
+ {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = "sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"},
+ {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"},
+ {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"},
+ {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"},
+ {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"},
+ {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"},
+ {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"},
+ {file = "watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"},
+ {file = "watchfiles-0.22.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d47e9ef1a94cc7a536039e46738e17cce058ac1593b2eccdede8bf72e45f372a"},
+ {file = "watchfiles-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28f393c1194b6eaadcdd8f941307fc9bbd7eb567995232c830f6aef38e8a6e88"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd64f3a4db121bc161644c9e10a9acdb836853155a108c2446db2f5ae1778c3d"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2abeb79209630da981f8ebca30a2c84b4c3516a214451bfc5f106723c5f45843"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc382083afba7918e32d5ef12321421ef43d685b9a67cc452a6e6e18920890e"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d048ad5d25b363ba1d19f92dcf29023988524bee6f9d952130b316c5802069cb"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:103622865599f8082f03af4214eaff90e2426edff5e8522c8f9e93dc17caee13"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e1f3cf81f1f823e7874ae563457828e940d75573c8fbf0ee66818c8b6a9099"},
+ {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8597b6f9dc410bdafc8bb362dac1cbc9b4684a8310e16b1ff5eee8725d13dcd6"},
+ {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b04a2cbc30e110303baa6d3ddce8ca3664bc3403be0f0ad513d1843a41c97d1"},
+ {file = "watchfiles-0.22.0-cp38-none-win32.whl", hash = "sha256:b610fb5e27825b570554d01cec427b6620ce9bd21ff8ab775fc3a32f28bba63e"},
+ {file = "watchfiles-0.22.0-cp38-none-win_amd64.whl", hash = "sha256:fe82d13461418ca5e5a808a9e40f79c1879351fcaeddbede094028e74d836e86"},
+ {file = "watchfiles-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3973145235a38f73c61474d56ad6199124e7488822f3a4fc97c72009751ae3b0"},
+ {file = "watchfiles-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:280a4afbc607cdfc9571b9904b03a478fc9f08bbeec382d648181c695648202f"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0d883351a34c01bd53cfa75cd0292e3f7e268bacf2f9e33af4ecede7e21d1d"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9165bcab15f2b6d90eedc5c20a7f8a03156b3773e5fb06a790b54ccecdb73385"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc1b9b56f051209be458b87edb6856a449ad3f803315d87b2da4c93b43a6fe72"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc1fc25a1dedf2dd952909c8e5cb210791e5f2d9bc5e0e8ebc28dd42fed7562"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc92d2d2706d2b862ce0568b24987eba51e17e14b79a1abcd2edc39e48e743c8"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97b94e14b88409c58cdf4a8eaf0e67dfd3ece7e9ce7140ea6ff48b0407a593ec"},
+ {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96eec15e5ea7c0b6eb5bfffe990fc7c6bd833acf7e26704eb18387fb2f5fd087"},
+ {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:28324d6b28bcb8d7c1041648d7b63be07a16db5510bea923fc80b91a2a6cbed6"},
+ {file = "watchfiles-0.22.0-cp39-none-win32.whl", hash = "sha256:8c3e3675e6e39dc59b8fe5c914a19d30029e36e9f99468dddffd432d8a7b1c93"},
+ {file = "watchfiles-0.22.0-cp39-none-win_amd64.whl", hash = "sha256:25c817ff2a86bc3de3ed2df1703e3d24ce03479b27bb4527c57e722f8554d971"},
+ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"},
+ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"},
+ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"},
+ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"},
+ {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"},
+ {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"},
+ {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"},
+ {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"},
+ {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"},
+ {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"},
+ {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"},
+ {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"},
+ {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"},
]
[package.dependencies]
@@ -10158,6 +9900,20 @@ files = [
idna = ">=2.0"
multidict = ">=4.0"
+[[package]]
+name = "youtube-transcript-api"
+version = "0.6.2"
+description = "This is an python API which allows you to get the transcripts/subtitles for a given YouTube video. It also works for automatically generated subtitles, supports translating subtitles and it does not require a headless browser, like other selenium based solutions do!"
+optional = false
+python-versions = "*"
+files = [
+ {file = "youtube_transcript_api-0.6.2-py3-none-any.whl", hash = "sha256:019dbf265c6a68a0591c513fff25ed5a116ce6525832aefdfb34d4df5567121c"},
+ {file = "youtube_transcript_api-0.6.2.tar.gz", hash = "sha256:cad223d7620633cec44f657646bffc8bbc5598bd8e70b1ad2fa8277dec305eb7"},
+]
+
+[package.dependencies]
+requests = "*"
+
[[package]]
name = "zep-python"
version = "2.0.0rc6"
@@ -10175,13 +9931,13 @@ pydantic = ">=2.0.0"
[[package]]
name = "zipp"
-version = "3.18.2"
+version = "3.19.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
- {file = "zipp-3.18.2-py3-none-any.whl", hash = "sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"},
- {file = "zipp-3.18.2.tar.gz", hash = "sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059"},
+ {file = "zipp-3.19.0-py3-none-any.whl", hash = "sha256:96dc6ad62f1441bcaccef23b274ec471518daf4fbbc580341204936a5a3dddec"},
+ {file = "zipp-3.19.0.tar.gz", hash = "sha256:952df858fb3164426c976d9338d3961e8e8b3758e2e059e0f754b8c4262625ee"},
]
[package.extras]
@@ -10208,48 +9964,54 @@ test = ["zope.testrunner"]
[[package]]
name = "zope-interface"
-version = "6.4.post1"
+version = "6.4.post2"
description = "Interfaces for Python"
optional = false
python-versions = ">=3.7"
files = [
- {file = "zope.interface-6.4.post1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:16f73c42f10f761051157332943ee1f7cf973cc1c78a50d1960c313a211cca4a"},
- {file = "zope.interface-6.4.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ca89624d0eabc7ce4f299c6d621531cb8b0ebac3bb4f9ebf2d057477602e1b8"},
- {file = "zope.interface-6.4.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de6eaa0b7df493904d24050dcdc3db6589bd94f7e49caab57971fe47a669b3ea"},
- {file = "zope.interface-6.4.post1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a45a7990d143acc37faa905d4a528f5923a5dd30f46536977d8061d10a895b09"},
- {file = "zope.interface-6.4.post1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcefd4012593ee410ebf5728ee98f61b3401f0563c5068e760aa2b7720ca68a0"},
- {file = "zope.interface-6.4.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:36234d3b8211d053c42684666c2a04eb1a35e0cec6bc3e54586bb60fb0be3b17"},
- {file = "zope.interface-6.4.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c18218451823ca9b5131ceaacf655fe9dd4e592ebf848cb0a65fe8428bbf604"},
- {file = "zope.interface-6.4.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:563a7192a5baf8d9b189dc598c3555e695e00fdce3eafb88b30d6d3df986fcc5"},
- {file = "zope.interface-6.4.post1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd3ab863a4e7d888728c949ba052a649664dea156bdd7140eb9269bbe6e33205"},
- {file = "zope.interface-6.4.post1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52901ff6d75a4332457610cbd2883f39b386c5bebe0745ecf78e3fe22cfdd0d9"},
- {file = "zope.interface-6.4.post1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:799743a342249c9b9529abd1115a3f81754800e75dea254b58efdd2984009798"},
- {file = "zope.interface-6.4.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:15fa208d7a802c0dd3e9d4d5336619a37efd57f2d2ce830d9f9d5843a2b7daba"},
- {file = "zope.interface-6.4.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d91698257850ca5f523a5513a69775e6fb7c18129311e118996f8e9b463d11b0"},
- {file = "zope.interface-6.4.post1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45fb6fe8b5852564e63d6705a7904530a7c886056e6e9aaf938dc5e2bc637097"},
- {file = "zope.interface-6.4.post1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec57dec41c0c8b723dd70da1864d50908c689e1c9cf43f32e9b04c0992e5d93d"},
- {file = "zope.interface-6.4.post1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de6c1dad571276768fd6bc92999e8d942151552662a9048e3384cac05b148985"},
- {file = "zope.interface-6.4.post1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaa1f8967c3f272de80c4bec4b1379f97cd29006323f50558bd2f780a4f637ef"},
- {file = "zope.interface-6.4.post1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8140cf2665c5a07adc285bc859fdda67cfbd7edd62480dfca2211f4798502b54"},
- {file = "zope.interface-6.4.post1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:18f4061c3456c61557e9d7068e435f5db164b38f15f3d9bd995ff185c6db2c62"},
- {file = "zope.interface-6.4.post1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a430d2cc52aef2af0dc45866852730fbc93463cf8cdeb179e8ee04440e0955c4"},
- {file = "zope.interface-6.4.post1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:065f8ff0e034e43b8da05ffed308a9e3311720a2b13b83724f26a8dd6709964d"},
- {file = "zope.interface-6.4.post1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0940b3b44b6cc0375ea0da5fefee05a9abe8bb53594a3a6e4aafb9f99dc5de8d"},
- {file = "zope.interface-6.4.post1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d200aef16e577682dd54a79ff5f4f897a9807722b54bd8a9bca404679c609d"},
- {file = "zope.interface-6.4.post1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e0678885d07e865047e15be3ebe5c87903cc7f5ca5edfd0045d1c7b43f7fe9d"},
- {file = "zope.interface-6.4.post1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bf9fa875a9bae5318f24b0d9ab9e2c8a23bccad2979e9e4305eed8119bbe3195"},
- {file = "zope.interface-6.4.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68b13ac49becfaba5b77359559812daac0c5c4b3c0d43cdb293a2dec8db95c24"},
- {file = "zope.interface-6.4.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a00983c5c793b17b829020e11032dabab023c4e0ef12f134b90df802ae5adf2"},
- {file = "zope.interface-6.4.post1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ada5ac54ac7d34bb33423da40b7f3edfc54c6b9623ac9daac7f456dbf25173ba"},
- {file = "zope.interface-6.4.post1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297ee171f40f8f18665bb75f576df7d1ddce19f3e6696ef6acb930dcbfbf693f"},
- {file = "zope.interface-6.4.post1.tar.gz", hash = "sha256:e9961413091e3c9d5c3ed671757049cc6153280f39a154a0b633608efcfdec6b"},
+ {file = "zope.interface-6.4.post2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2eccd5bef45883802848f821d940367c1d0ad588de71e5cabe3813175444202c"},
+ {file = "zope.interface-6.4.post2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:762e616199f6319bb98e7f4f27d254c84c5fb1c25c908c2a9d0f92b92fb27530"},
+ {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef8356f16b1a83609f7a992a6e33d792bb5eff2370712c9eaae0d02e1924341"},
+ {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e4fa5d34d7973e6b0efa46fe4405090f3b406f64b6290facbb19dcbf642ad6b"},
+ {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d22fce0b0f5715cdac082e35a9e735a1752dc8585f005d045abb1a7c20e197f9"},
+ {file = "zope.interface-6.4.post2-cp310-cp310-win_amd64.whl", hash = "sha256:97e615eab34bd8477c3f34197a17ce08c648d38467489359cb9eb7394f1083f7"},
+ {file = "zope.interface-6.4.post2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:599f3b07bde2627e163ce484d5497a54a0a8437779362395c6b25e68c6590ede"},
+ {file = "zope.interface-6.4.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:136cacdde1a2c5e5bc3d0b2a1beed733f97e2dad8c2ad3c2e17116f6590a3827"},
+ {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47937cf2e7ed4e0e37f7851c76edeb8543ec9b0eae149b36ecd26176ff1ca874"},
+ {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f0a6be264afb094975b5ef55c911379d6989caa87c4e558814ec4f5125cfa2e"},
+ {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47654177e675bafdf4e4738ce58cdc5c6d6ee2157ac0a78a3fa460942b9d64a8"},
+ {file = "zope.interface-6.4.post2-cp311-cp311-win_amd64.whl", hash = "sha256:e2fb8e8158306567a3a9a41670c1ff99d0567d7fc96fa93b7abf8b519a46b250"},
+ {file = "zope.interface-6.4.post2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b912750b13d76af8aac45ddf4679535def304b2a48a07989ec736508d0bbfbde"},
+ {file = "zope.interface-6.4.post2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ac46298e0143d91e4644a27a769d1388d5d89e82ee0cf37bf2b0b001b9712a4"},
+ {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86a94af4a88110ed4bb8961f5ac72edf782958e665d5bfceaab6bf388420a78b"},
+ {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73f9752cf3596771c7726f7eea5b9e634ad47c6d863043589a1c3bb31325c7eb"},
+ {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b5c3e9744dcdc9e84c24ed6646d5cf0cf66551347b310b3ffd70f056535854"},
+ {file = "zope.interface-6.4.post2-cp312-cp312-win_amd64.whl", hash = "sha256:551db2fe892fcbefb38f6f81ffa62de11090c8119fd4e66a60f3adff70751ec7"},
+ {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96ac6b3169940a8cd57b4f2b8edcad8f5213b60efcd197d59fbe52f0accd66e"},
+ {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cebff2fe5dc82cb22122e4e1225e00a4a506b1a16fafa911142ee124febf2c9e"},
+ {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ee982237cffaf946db365c3a6ebaa37855d8e3ca5800f6f48890209c1cfefc"},
+ {file = "zope.interface-6.4.post2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:fbf649bc77510ef2521cf797700b96167bb77838c40780da7ea3edd8b78044d1"},
+ {file = "zope.interface-6.4.post2-cp37-cp37m-win_amd64.whl", hash = "sha256:4c0b208a5d6c81434bdfa0f06d9b667e5de15af84d8cae5723c3a33ba6611b82"},
+ {file = "zope.interface-6.4.post2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3fe667935e9562407c2511570dca14604a654988a13d8725667e95161d92e9b"},
+ {file = "zope.interface-6.4.post2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a96e6d4074db29b152222c34d7eec2e2db2f92638d2b2b2c704f9e8db3ae0edc"},
+ {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:866a0f583be79f0def667a5d2c60b7b4cc68f0c0a470f227e1122691b443c934"},
+ {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe919027f29b12f7a2562ba0daf3e045cb388f844e022552a5674fcdf5d21f1"},
+ {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e0343a6e06d94f6b6ac52fbc75269b41dd3c57066541a6c76517f69fe67cb43"},
+ {file = "zope.interface-6.4.post2-cp38-cp38-win_amd64.whl", hash = "sha256:dabb70a6e3d9c22df50e08dc55b14ca2a99da95a2d941954255ac76fd6982bc5"},
+ {file = "zope.interface-6.4.post2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:706efc19f9679a1b425d6fa2b4bc770d976d0984335eaea0869bd32f627591d2"},
+ {file = "zope.interface-6.4.post2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d136e5b8821073e1a09dde3eb076ea9988e7010c54ffe4d39701adf0c303438"},
+ {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1730c93a38b5a18d24549bc81613223962a19d457cfda9bdc66e542f475a36f4"},
+ {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc2676312cc3468a25aac001ec727168994ea3b69b48914944a44c6a0b251e79"},
+ {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a62fd6cd518693568e23e02f41816adedfca637f26716837681c90b36af3671"},
+ {file = "zope.interface-6.4.post2-cp39-cp39-win_amd64.whl", hash = "sha256:d3f7e001328bd6466b3414215f66dde3c7c13d8025a9c160a75d7b2687090d15"},
+ {file = "zope.interface-6.4.post2.tar.gz", hash = "sha256:1c207e6f6dfd5749a26f5a5fd966602d6b824ec00d2df84a7e9a924e8933654e"},
]
[package.dependencies]
setuptools = "*"
[package.extras]
-docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx_rtd_theme"]
+docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"]
test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
@@ -10260,4 +10022,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "9406313d19280623987bf2ee831626bc79ec0abf0ec1fe547df89bc9b1b93b0d"
+content-hash = "36778b105f6f6e5efd0c1d37651d7b97defb0bc0db74b868a41e38de22251924"
diff --git a/pyproject.toml b/pyproject.toml
index cc7198812..83834f95e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
-version = "1.0.0a36"
+version = "1.0.0a38"
description = "A Python package with a built-in web application"
authors = ["Langflow "]
maintainers = [
@@ -29,20 +29,20 @@ python = ">=3.10,<3.13"
langflow-base = { path = "./src/backend/base", develop = true }
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
-google-api-python-client = "^2.118.0"
+google-api-python-client = "^2.130.0"
huggingface-hub = { version = "^0.20.0", extras = ["inference"] }
llama-cpp-python = { version = "~0.2.0", optional = true }
networkx = "^3.1"
-fake-useragent = "^1.4.0"
+fake-useragent = "^1.5.0"
psycopg2-binary = "^2.9.6"
pyarrow = "^14.0.0"
wikipedia = "^1.4.0"
-qdrant-client = "^1.7.0"
+qdrant-client = "^1.9.0"
weaviate-client = "*"
sentence-transformers = { version = "^2.3.1", optional = true }
ctransformers = { version = "^0.2.10", optional = true }
-cohere = "^5.1.7"
-faiss-cpu = "^1.7.4"
+cohere = "^5.5.3"
+faiss-cpu = "^1.8.0"
types-cachetools = "^5.3.0.5"
pinecone-client = "^3.0.3"
pymongo = "^4.6.0"
@@ -56,7 +56,7 @@ redis = { version = "^5.0.1", optional = true }
flower = { version = "^2.0.0", optional = true }
metaphor-python = "^0.1.11"
pywin32 = { version = "^306", markers = "sys_platform == 'win32'" }
-langfuse = "^2.9.0"
+langfuse = "^2.33.0"
metal-sdk = "^2.5.0"
markupsafe = "^2.1.3"
# jq is not available for windows
@@ -69,14 +69,12 @@ langchain-google-genai = "^1.0.1"
langchain-cohere = "^0.1.0rc1"
elasticsearch = "^8.12.0"
pytube = "^15.0.0"
-llama-index = "^0.10.13"
-# unstructured = { extras = ["md"], version = "^0.12.4" }
dspy-ai = "^2.4.0"
-assemblyai = "^0.23.1"
-litellm = "^1.34.22"
-chromadb = "^0.4.24"
+assemblyai = "^0.26.0"
+litellm = "^1.38.0"
+chromadb = "^0.5.0"
langchain-anthropic = "^0.1.6"
-langchain-astradb = "^0.1.0"
+langchain-astradb = "^0.3.0"
langchain-openai = "^0.1.1"
zep-python = { version = "^2.0.0rc5", allow-prereleases = true }
langchain-google-vertexai = "^1.0.3"
@@ -84,26 +82,29 @@ langchain-groq = "^0.1.3"
langchain-pinecone = "^0.1.0"
langchain-mistralai = "^0.1.6"
couchbase = "^4.2.1"
+youtube-transcript-api = "^0.6.2"
+markdown = "^3.6"
+langchain-chroma = "^0.1.1"
[tool.poetry.group.dev.dependencies]
types-redis = "^4.6.0.5"
ipykernel = "^6.29.0"
-mypy = "^1.9.0"
-ruff = "^0.3.5"
+mypy = "^1.10.0"
+ruff = "^0.4.5"
httpx = "*"
-pytest = "^8.1.0"
-types-requests = "^2.31.0"
-requests = "^2.31.0"
-pytest-cov = "^4.1.0"
+pytest = "^8.2.0"
+types-requests = "^2.32.0"
+requests = "^2.32.0"
+pytest-cov = "^5.0.0"
pandas-stubs = "^2.1.4.231227"
types-pillow = "^10.2.0.20240213"
types-pyyaml = "^6.0.12.8"
types-python-jose = "^3.3.4.8"
types-passlib = "^1.7.7.13"
locust = "^2.23.1"
-pytest-mock = "^3.12.0"
-pytest-xdist = "^3.5.0"
+pytest-mock = "^3.14.0"
+pytest-xdist = "^3.6.0"
types-pywin32 = "^306.0.0.4"
types-google-cloud-ndb = "^2.2.0.0"
pytest-sugar = "^1.0.0"
@@ -112,6 +113,7 @@ pytest-instafail = "^0.5.0"
pytest-asyncio = "^0.23.0"
pytest-profiling = "^1.7.0"
pre-commit = "^3.7.0"
+vulture = "^2.11"
[tool.poetry.extras]
deploy = ["celery", "redis", "flower"]
@@ -132,7 +134,7 @@ ignore-regex = '.*(Stati Uniti|Tense=Pres).*'
[tool.pytest.ini_options]
minversion = "6.0"
-addopts = "-ra"
+addopts = "-ra -n auto"
testpaths = ["tests", "integration"]
console_output_style = "progress"
filterwarnings = ["ignore::DeprecationWarning"]
diff --git a/render.yaml b/render.yaml
index 583a3c324..9276efee1 100644
--- a/render.yaml
+++ b/render.yaml
@@ -3,9 +3,9 @@ services:
- type: web
name: langflow
runtime: docker
- dockerfilePath: ./Dockerfile
+ dockerfilePath: ./docker/render.Dockerfile
repo: https://github.com/langflow-ai/langflow
- branch: main
+ branch: dev
healthCheckPath: /health
autoDeploy: false
envVars:
diff --git a/GCP_DEPLOYMENT.md b/scripts/gcp/GCP_DEPLOYMENT.md
similarity index 99%
rename from GCP_DEPLOYMENT.md
rename to scripts/gcp/GCP_DEPLOYMENT.md
index 9f17e550b..a848d3d2b 100644
--- a/GCP_DEPLOYMENT.md
+++ b/scripts/gcp/GCP_DEPLOYMENT.md
@@ -20,8 +20,7 @@ When running as a [spot (preemptible) instance](https://cloud.google.com/compute
## Pricing (approximate)
-> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator)
->
+> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator) >
| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
| ------------------ | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | -------------------------------------------------------------------------- |
diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py
index dfb784c90..4d59b3149 100644
--- a/src/backend/base/langflow/__main__.py
+++ b/src/backend/base/langflow/__main__.py
@@ -22,7 +22,8 @@ from sqlmodel import select
from langflow.main import setup_app
from langflow.services.database.models.folder.utils import create_default_folder_if_it_doesnt_exist
from langflow.services.database.utils import session_getter
-from langflow.services.deps import get_db_service
+from langflow.services.deps import get_db_service, get_settings_service, session_scope
+from langflow.services.settings.constants import DEFAULT_SUPERUSER
from langflow.services.utils import initialize_services
from langflow.utils.logger import configure, logger
from langflow.utils.util import update_settings
@@ -83,7 +84,6 @@ def run(
help="Path to the directory containing custom components.",
envvar="LANGFLOW_COMPONENTS_PATH",
),
- config: str = typer.Option(Path(__file__).parent / "config.yaml", help="Path to the configuration file."),
# .env file param
env_file: Path = typer.Option(None, help="Path to the .env file containing environment variables."),
log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
@@ -132,7 +132,6 @@ def run(
load_dotenv(env_file, override=True)
update_settings(
- config,
dev=dev,
remove_api_keys=remove_api_keys,
cache=cache,
@@ -510,6 +509,66 @@ def migration(
display_results(results)
+@app.command()
+def api_key(
+ log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
+):
+ """
+ Creates an API key for the default superuser if AUTO_LOGIN is enabled.
+
+ Args:
+ log_level (str, optional): Logging level. Defaults to "error".
+
+ Returns:
+ None
+ """
+ configure(log_level=log_level)
+ initialize_services()
+ settings_service = get_settings_service()
+ auth_settings = settings_service.auth_settings
+ if not auth_settings.AUTO_LOGIN:
+ typer.echo("Auto login is disabled. API keys cannot be created through the CLI.")
+ return
+ with session_scope() as session:
+ from langflow.services.database.models.user.model import User
+
+ superuser = session.exec(select(User).where(User.username == DEFAULT_SUPERUSER)).first()
+ if not superuser:
+ typer.echo("Default superuser not found. This command requires a superuser and AUTO_LOGIN to be enabled.")
+ return
+ from langflow.services.database.models.api_key import ApiKey, ApiKeyCreate
+ from langflow.services.database.models.api_key.crud import create_api_key, delete_api_key
+
+ api_key = session.exec(select(ApiKey).where(ApiKey.user_id == superuser.id)).first()
+ if api_key:
+ delete_api_key(session, api_key.id)
+
+ api_key_create = ApiKeyCreate(name="CLI")
+ unmasked_api_key = create_api_key(session, api_key_create, user_id=superuser.id)
+ session.commit()
+ # Create a banner to display the API key and tell the user it won't be shown again
+ api_key_banner(unmasked_api_key)
+
+
+def api_key_banner(unmasked_api_key):
+ is_mac = platform.system() == "Darwin"
+ import pyperclip # type: ignore
+
+ pyperclip.copy(unmasked_api_key.api_key)
+ panel = Panel(
+ f"[bold]API Key Created Successfully:[/bold]\n\n"
+ f"[bold blue]{unmasked_api_key.api_key}[/bold blue]\n\n"
+ "This is the only time the API key will be displayed. \n"
+ "Make sure to store it in a secure location. \n\n"
+ f"The API key has been copied to your clipboard. [bold]{['Ctrl','Cmd'][is_mac]} + V[/bold] to paste it.",
+ box=box.ROUNDED,
+ border_style="blue",
+ expand=False,
+ )
+ console = Console()
+ console.print(panel)
+
+
def main():
with warnings.catch_warnings():
warnings.simplefilter("ignore")
diff --git a/src/backend/base/langflow/api/utils.py b/src/backend/base/langflow/api/utils.py
index f7e0548fe..ffb5f22c9 100644
--- a/src/backend/base/langflow/api/utils.py
+++ b/src/backend/base/langflow/api/utils.py
@@ -1,4 +1,3 @@
-import os
import warnings
from pathlib import Path
from typing import TYPE_CHECKING, Optional
diff --git a/src/backend/base/langflow/api/v1/callback.py b/src/backend/base/langflow/api/v1/callback.py
index b326311ac..6a60ea037 100644
--- a/src/backend/base/langflow/api/v1/callback.py
+++ b/src/backend/base/langflow/api/v1/callback.py
@@ -1,13 +1,12 @@
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from uuid import UUID
-
-from langchain.schema import AgentAction, AgentFinish
from langchain_core.callbacks.base import AsyncCallbackHandler
from loguru import logger
from langflow.api.v1.schemas import ChatResponse, PromptResponse
from langflow.services.deps import get_chat_service, get_socket_service
from langflow.utils.util import remove_ansi_escape_codes
+from langchain_core.agents import AgentAction, AgentFinish
if TYPE_CHECKING:
from langflow.services.socket.service import SocketIOService
diff --git a/src/backend/base/langflow/api/v1/endpoints.py b/src/backend/base/langflow/api/v1/endpoints.py
index e529de81c..b47774c8b 100644
--- a/src/backend/base/langflow/api/v1/endpoints.py
+++ b/src/backend/base/langflow/api/v1/endpoints.py
@@ -18,10 +18,9 @@ from langflow.api.v1.schemas import (
UpdateCustomComponentRequest,
UploadFileResponse,
)
+from langflow.custom import CustomComponent
+from langflow.custom.utils import build_custom_component_template
from langflow.graph.graph.base import Graph
-from langflow.graph.schema import RunOutputs
-from langflow.interface.custom.custom_component import CustomComponent
-from langflow.interface.custom.utils import build_custom_component_template
from langflow.processing.process import process_tweaks, run_graph_internal
from langflow.schema.graph import Tweaks
from langflow.services.auth.utils import api_key_security, get_current_active_user
@@ -44,7 +43,7 @@ def get_all(
logger.debug("Building langchain types dict")
try:
- all_types_dict = get_all_types_dict(settings_service.settings.COMPONENTS_PATH)
+ all_types_dict = get_all_types_dict(settings_service.settings.components_path)
return all_types_dict
except Exception as exc:
logger.exception(exc)
diff --git a/src/backend/base/langflow/api/v1/flows.py b/src/backend/base/langflow/api/v1/flows.py
index 7fe7c516b..ce7d34cf6 100644
--- a/src/backend/base/langflow/api/v1/flows.py
+++ b/src/backend/base/langflow/api/v1/flows.py
@@ -38,7 +38,10 @@ def create_flow(
db_flow.updated_at = datetime.now(timezone.utc)
if db_flow.folder_id is None:
- default_folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME)).first()
+ # Make sure flows always have a folder
+ default_folder = session.exec(
+ select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME, Folder.user_id == current_user.id)
+ ).first()
if default_folder:
db_flow.folder_id = default_folder.id
@@ -127,7 +130,7 @@ def update_flow(
if not db_flow:
raise HTTPException(status_code=404, detail="Flow not found")
flow_data = flow.model_dump(exclude_unset=True)
- if settings_service.settings.REMOVE_API_KEYS:
+ if settings_service.settings.remove_api_keys:
flow_data = remove_api_keys(flow_data)
for key, value in flow_data.items():
if value is not None:
diff --git a/src/backend/base/langflow/api/v1/folders.py b/src/backend/base/langflow/api/v1/folders.py
index 96729133b..3aa57842c 100644
--- a/src/backend/base/langflow/api/v1/folders.py
+++ b/src/backend/base/langflow/api/v1/folders.py
@@ -3,12 +3,11 @@ from uuid import UUID
import orjson
from fastapi import APIRouter, Depends, File, HTTPException, Response, UploadFile, status
-from sqlalchemy import update
+from sqlalchemy import or_, update
from sqlmodel import Session, select
from langflow.api.v1.flows import create_flows
from langflow.api.v1.schemas import FlowListCreate, FlowListReadWithFolderName
-from langflow.initial_setup.setup import STARTER_FOLDER_NAME
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.flow.model import Flow, FlowCreate, FlowRead
from langflow.services.database.models.folder.constants import DEFAULT_FOLDER_NAME
@@ -35,6 +34,18 @@ def create_folder(
try:
new_folder = Folder.model_validate(folder, from_attributes=True)
new_folder.user_id = current_user.id
+
+ folder_results = session.exec(
+ select(Folder).where(
+ Folder.name.like(f"{new_folder.name}%"), # type: ignore
+ Folder.user_id == current_user.id,
+ )
+ )
+ existing_folder_names = [folder.name for folder in folder_results]
+
+ if existing_folder_names:
+ new_folder.name = f"{new_folder.name} ({len(existing_folder_names) + 1})"
+
session.add(new_folder)
session.commit()
session.refresh(new_folder)
@@ -63,16 +74,11 @@ def read_folders(
current_user: User = Depends(get_current_active_user),
):
try:
- folders = session.exec(select(Folder).where(Folder.user_id == current_user.id)).all()
- return folders
- except Exception as e:
- raise HTTPException(status_code=500, detail=str(e))
-
-
-@router.get("/starter-projects", response_model=FolderReadWithFlows, status_code=200)
-def read_starter_folders(*, session: Session = Depends(get_session)):
- try:
- folders = session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first()
+ folders = session.exec(
+ select(Folder).where(
+ or_(Folder.user_id == current_user.id, Folder.user_id == None) # type: ignore # noqa: E711
+ )
+ ).all()
return folders
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
diff --git a/src/backend/base/langflow/api/v1/store.py b/src/backend/base/langflow/api/v1/store.py
index 7d0e9bff9..d645480d0 100644
--- a/src/backend/base/langflow/api/v1/store.py
+++ b/src/backend/base/langflow/api/v1/store.py
@@ -54,7 +54,7 @@ def check_if_store_is_enabled(
settings_service=Depends(get_settings_service),
):
return {
- "enabled": settings_service.settings.STORE,
+ "enabled": settings_service.settings.store,
}
diff --git a/src/backend/base/langflow/base/io/chat.py b/src/backend/base/langflow/base/io/chat.py
index 7cdd45607..131ea1a26 100644
--- a/src/backend/base/langflow/base/io/chat.py
+++ b/src/backend/base/langflow/base/io/chat.py
@@ -1,9 +1,9 @@
from typing import Optional, Union
from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
from langflow.helpers.record import records_to_text
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.memory import store_message
from langflow.schema import Record
diff --git a/src/backend/base/langflow/base/io/text.py b/src/backend/base/langflow/base/io/text.py
index 5974e7d13..5ecfea11a 100644
--- a/src/backend/base/langflow/base/io/text.py
+++ b/src/backend/base/langflow/base/io/text.py
@@ -1,8 +1,8 @@
from typing import Optional
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
from langflow.helpers.record import records_to_text
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/base/memory/memory.py b/src/backend/base/langflow/base/memory/memory.py
index 1bb2e22ff..0fb8cf209 100644
--- a/src/backend/base/langflow/base/memory/memory.py
+++ b/src/backend/base/langflow/base/memory/memory.py
@@ -1,6 +1,6 @@
from typing import Optional
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/base/prompts/api_utils.py b/src/backend/base/langflow/base/prompts/api_utils.py
index 89a17399c..d5a6aac28 100644
--- a/src/backend/base/langflow/base/prompts/api_utils.py
+++ b/src/backend/base/langflow/base/prompts/api_utils.py
@@ -1,10 +1,10 @@
from fastapi import HTTPException
-from langchain.prompts import PromptTemplate
from loguru import logger
from langflow.api.v1.base import INVALID_NAMES, check_input_variables
from langflow.interface.utils import extract_input_variables_from_prompt
from langflow.template.field.prompt import DefaultPromptField
+from langchain_core.prompts import PromptTemplate
def validate_prompt(prompt_template: str, silent_errors: bool = False) -> list[str]:
diff --git a/src/backend/base/langflow/components/agents/AgentInitializer.py b/src/backend/base/langflow/components/agents/AgentInitializer.py
deleted file mode 100644
index d1f09d5cf..000000000
--- a/src/backend/base/langflow/components/agents/AgentInitializer.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from typing import Callable, List, Optional, Union
-
-from langchain.agents import AgentExecutor, AgentType, initialize_agent, types
-
-from langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool
-from langflow.interface.custom.custom_component import CustomComponent
-
-
-class AgentInitializerComponent(CustomComponent):
- display_name: str = "Agent Initializer"
- description: str = "Initialize a Langchain Agent."
- documentation: str = "https://python.langchain.com/docs/modules/agents/agent_types/"
-
- def build_config(self):
- agents = list(types.AGENT_TO_CLASS.keys())
- # field_type and required are optional
- return {
- "agent": {"options": agents, "value": agents[0], "display_name": "Agent Type"},
- "max_iterations": {"display_name": "Max Iterations", "value": 10},
- "memory": {"display_name": "Memory"},
- "tools": {"display_name": "Tools"},
- "llm": {"display_name": "Language Model"},
- "code": {"advanced": True},
- }
-
- def build(
- self,
- agent: str,
- llm: BaseLanguageModel,
- tools: List[Tool],
- max_iterations: int,
- memory: Optional[BaseChatMemory] = None,
- ) -> Union[AgentExecutor, Callable]:
- agent = AgentType(agent)
- if memory:
- return initialize_agent(
- tools=tools,
- llm=llm,
- agent=agent,
- memory=memory,
- return_intermediate_steps=True,
- handle_parsing_errors=True,
- max_iterations=max_iterations,
- )
- return initialize_agent(
- tools=tools,
- llm=llm,
- agent=agent,
- return_intermediate_steps=True,
- handle_parsing_errors=True,
- max_iterations=max_iterations,
- )
diff --git a/src/backend/base/langflow/components/agents/JsonAgent.py b/src/backend/base/langflow/components/agents/JsonAgent.py
index 5fa342417..17826ef00 100644
--- a/src/backend/base/langflow/components/agents/JsonAgent.py
+++ b/src/backend/base/langflow/components/agents/JsonAgent.py
@@ -1,10 +1,9 @@
-from langchain.agents import AgentExecutor, create_json_agent
+from langchain.agents import AgentExecutor
+from langchain_community.agent_toolkits import create_json_agent
from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
-from langflow.field_typing import (
- BaseLanguageModel,
-)
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
+from langflow.field_typing import BaseLanguageModel
class JsonAgentComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py b/src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py
deleted file mode 100644
index bda2579f6..000000000
--- a/src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py
+++ /dev/null
@@ -1,102 +0,0 @@
-from typing import List, Optional
-
-from langchain.agents.agent import AgentExecutor
-from langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import _get_default_system_message
-from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
-from langchain.memory.token_buffer import ConversationTokenBufferMemory
-from langchain.prompts import SystemMessagePromptTemplate
-from langchain.prompts.chat import MessagesPlaceholder
-from langchain.schema.memory import BaseMemory
-from langchain.tools import Tool
-from langchain_openai import ChatOpenAI
-
-from langflow.field_typing.range_spec import RangeSpec
-from langflow.interface.custom.custom_component import CustomComponent
-from pydantic.v1 import SecretStr
-
-
-class ConversationalAgent(CustomComponent):
- display_name: str = "OpenAI Conversational Agent"
- description: str = "Conversational Agent that can use OpenAI's function calling API"
- icon = "OpenAI"
-
- def build_config(self):
- openai_function_models = [
- "gpt-4-turbo-preview",
- "gpt-4-0125-preview",
- "gpt-4-1106-preview",
- "gpt-4-vision-preview",
- "gpt-3.5-turbo-0125",
- "gpt-3.5-turbo-1106",
- ]
- return {
- "tools": {"display_name": "Tools"},
- "memory": {"display_name": "Memory"},
- "system_message": {"display_name": "System Message"},
- "max_token_limit": {"display_name": "Max Token Limit"},
- "model_name": {
- "display_name": "Model Name",
- "options": openai_function_models,
- "value": openai_function_models[0],
- },
- "code": {"show": False},
- "temperature": {
- "display_name": "Temperature",
- "value": 0.2,
- "rangeSpec": RangeSpec(min=0, max=2, step=0.1),
- },
- }
-
- def build(
- self,
- model_name: str,
- openai_api_key: str,
- tools: List[Tool],
- openai_api_base: Optional[str] = None,
- memory: Optional[BaseMemory] = None,
- system_message: Optional[SystemMessagePromptTemplate] = None,
- max_token_limit: int = 2000,
- temperature: float = 0.9,
- ) -> AgentExecutor:
- if openai_api_key:
- api_key = SecretStr(openai_api_key)
- else:
- api_key = None
-
- llm = ChatOpenAI(
- model=model_name,
- api_key=api_key,
- base_url=openai_api_base,
- max_tokens=max_token_limit,
- temperature=temperature,
- )
- if not memory:
- memory_key = "chat_history"
- memory = ConversationTokenBufferMemory(
- memory_key=memory_key,
- return_messages=True,
- output_key="output",
- llm=llm,
- max_token_limit=max_token_limit,
- )
- else:
- memory_key = memory.memory_key # type: ignore
-
- _system_message = system_message or _get_default_system_message()
- prompt = OpenAIFunctionsAgent.create_prompt(
- system_message=_system_message, # type: ignore
- extra_prompt_messages=[MessagesPlaceholder(variable_name=memory_key)],
- )
- agent = OpenAIFunctionsAgent(
- llm=llm,
- tools=tools,
- prompt=prompt, # type: ignore
- )
- return AgentExecutor(
- agent=agent,
- tools=tools, # type: ignore
- memory=memory,
- verbose=True,
- return_intermediate_steps=True,
- handle_parsing_errors=True,
- )
diff --git a/src/backend/base/langflow/components/agents/SQLAgent.py b/src/backend/base/langflow/components/agents/SQLAgent.py
index abde4ab94..cd6b03f94 100644
--- a/src/backend/base/langflow/components/agents/SQLAgent.py
+++ b/src/backend/base/langflow/components/agents/SQLAgent.py
@@ -1,12 +1,12 @@
from typing import Callable, Union
from langchain.agents import AgentExecutor
-from langchain_community.utilities import SQLDatabase
from langchain_community.agent_toolkits import SQLDatabaseToolkit
from langchain_community.agent_toolkits.sql.base import create_sql_agent
+from langchain_community.utilities import SQLDatabase
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
-from langflow.interface.custom.custom_component import CustomComponent
class SQLAgentComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/agents/VectorStoreAgent.py b/src/backend/base/langflow/components/agents/VectorStoreAgent.py
index 095f9da41..3cba51a09 100644
--- a/src/backend/base/langflow/components/agents/VectorStoreAgent.py
+++ b/src/backend/base/langflow/components/agents/VectorStoreAgent.py
@@ -3,8 +3,8 @@ from typing import Callable, Union
from langchain.agents import AgentExecutor, create_vectorstore_agent
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
-from langflow.interface.custom.custom_component import CustomComponent
class VectorStoreAgentComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py b/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py
index 514a9767c..e483f0d2c 100644
--- a/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py
+++ b/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py
@@ -4,7 +4,7 @@ from langchain.agents import create_vectorstore_router_agent
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit
from langchain_core.language_models.base import BaseLanguageModel
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class VectorStoreRouterAgentComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/agents/__init__.py b/src/backend/base/langflow/components/agents/__init__.py
index f2a118b58..8bd64bab0 100644
--- a/src/backend/base/langflow/components/agents/__init__.py
+++ b/src/backend/base/langflow/components/agents/__init__.py
@@ -1,17 +1,13 @@
-from .AgentInitializer import AgentInitializerComponent
from .CSVAgent import CSVAgentComponent
from .JsonAgent import JsonAgentComponent
-from .OpenAIConversationalAgent import ConversationalAgent
from .SQLAgent import SQLAgentComponent
from .VectorStoreAgent import VectorStoreAgentComponent
from .VectorStoreRouterAgent import VectorStoreRouterAgentComponent
from .XMLAgent import XMLAgentComponent
__all__ = [
- "AgentInitializerComponent",
"CSVAgentComponent",
"JsonAgentComponent",
- "ConversationalAgent",
"SQLAgentComponent",
"VectorStoreAgentComponent",
"VectorStoreRouterAgentComponent",
diff --git a/src/backend/base/langflow/components/chains/ConversationChain.py b/src/backend/base/langflow/components/chains/ConversationChain.py
index 2b8dd09a3..0801f4623 100644
--- a/src/backend/base/langflow/components/chains/ConversationChain.py
+++ b/src/backend/base/langflow/components/chains/ConversationChain.py
@@ -2,8 +2,8 @@ from typing import Optional
from langchain.chains import ConversationChain
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, BaseMemory, Text
-from langflow.interface.custom.custom_component import CustomComponent
class ConversationChainComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/chains/LLMChain.py b/src/backend/base/langflow/components/chains/LLMChain.py
index 9fba051db..0387b50f3 100644
--- a/src/backend/base/langflow/components/chains/LLMChain.py
+++ b/src/backend/base/langflow/components/chains/LLMChain.py
@@ -1,11 +1,11 @@
from typing import Optional
from langchain.chains.llm import LLMChain
-
-from langflow.field_typing import BaseLanguageModel, BaseMemory, Text
-from langflow.interface.custom.custom_component import CustomComponent
from langchain_core.prompts import PromptTemplate
+from langflow.custom import CustomComponent
+from langflow.field_typing import BaseLanguageModel, BaseMemory, Text
+
class LLMChainComponent(CustomComponent):
display_name = "LLMChain"
diff --git a/src/backend/base/langflow/components/chains/LLMCheckerChain.py b/src/backend/base/langflow/components/chains/LLMCheckerChain.py
index 04e6fe67d..f413081b1 100644
--- a/src/backend/base/langflow/components/chains/LLMCheckerChain.py
+++ b/src/backend/base/langflow/components/chains/LLMCheckerChain.py
@@ -1,7 +1,7 @@
from langchain.chains import LLMCheckerChain
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, Text
-from langflow.interface.custom.custom_component import CustomComponent
class LLMCheckerChainComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/chains/LLMMathChain.py b/src/backend/base/langflow/components/chains/LLMMathChain.py
index 0b7374da6..2bb573ef5 100644
--- a/src/backend/base/langflow/components/chains/LLMMathChain.py
+++ b/src/backend/base/langflow/components/chains/LLMMathChain.py
@@ -2,8 +2,8 @@ from typing import Optional
from langchain.chains import LLMChain, LLMMathChain
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, BaseMemory, Text
-from langflow.interface.custom.custom_component import CustomComponent
class LLMMathChainComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/chains/RetrievalQA.py b/src/backend/base/langflow/components/chains/RetrievalQA.py
index 7b6ba49e4..da77f89d4 100644
--- a/src/backend/base/langflow/components/chains/RetrievalQA.py
+++ b/src/backend/base/langflow/components/chains/RetrievalQA.py
@@ -3,8 +3,8 @@ from typing import Optional
from langchain.chains.retrieval_qa.base import RetrievalQA
from langchain_core.documents import Document
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py
index 75a9131f5..2e0fa4ced 100644
--- a/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py
+++ b/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py
@@ -3,8 +3,8 @@ from typing import Optional
from langchain.chains import RetrievalQAWithSourcesChain
from langchain_core.documents import Document
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text
-from langflow.interface.custom.custom_component import CustomComponent
class RetrievalQAWithSourcesChainComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/chains/SQLGenerator.py b/src/backend/base/langflow/components/chains/SQLGenerator.py
index 3b111ba71..a6ff0ee2f 100644
--- a/src/backend/base/langflow/components/chains/SQLGenerator.py
+++ b/src/backend/base/langflow/components/chains/SQLGenerator.py
@@ -5,8 +5,8 @@ from langchain_community.utilities.sql_database import SQLDatabase
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import Runnable
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, Text
-from langflow.interface.custom.custom_component import CustomComponent
class SQLGeneratorComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/data/APIRequest.py b/src/backend/base/langflow/components/data/APIRequest.py
index 9f1ca703c..f4cf476f0 100644
--- a/src/backend/base/langflow/components/data/APIRequest.py
+++ b/src/backend/base/langflow/components/data/APIRequest.py
@@ -3,7 +3,8 @@ import json
from typing import List, Optional
import httpx
-from langflow.interface.custom.custom_component import CustomComponent
+
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/data/Directory.py b/src/backend/base/langflow/components/data/Directory.py
index 87dc99287..4dfa51de3 100644
--- a/src/backend/base/langflow/components/data/Directory.py
+++ b/src/backend/base/langflow/components/data/Directory.py
@@ -1,7 +1,7 @@
from typing import Any, Dict, List, Optional
from langflow.base.data.utils import parallel_load_records, parse_text_file_to_record, retrieve_file_paths
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/data/File.py b/src/backend/base/langflow/components/data/File.py
index 70fe1dccc..5ebb94cff 100644
--- a/src/backend/base/langflow/components/data/File.py
+++ b/src/backend/base/langflow/components/data/File.py
@@ -2,7 +2,7 @@ from pathlib import Path
from typing import Any, Dict
from langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/data/URL.py b/src/backend/base/langflow/components/data/URL.py
index 2b286e126..f9e515205 100644
--- a/src/backend/base/langflow/components/data/URL.py
+++ b/src/backend/base/langflow/components/data/URL.py
@@ -2,7 +2,7 @@ from typing import Any, Dict
from langchain_community.document_loaders.web_base import WebBaseLoader
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py
index d4330d9e4..e43c144b1 100644
--- a/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py
@@ -1,9 +1,9 @@
from typing import Optional
-from langchain.embeddings.base import Embeddings
from langchain_community.embeddings import BedrockEmbeddings
+from langchain_core.embeddings import Embeddings
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class AmazonBedrockEmeddingsComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py
index d8aec24cd..dd40d64d5 100644
--- a/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py
@@ -1,7 +1,8 @@
-from langchain.embeddings.base import Embeddings
-from langchain_community.embeddings import AzureOpenAIEmbeddings
+from langchain_core.embeddings import Embeddings
+from langchain_openai import AzureOpenAIEmbeddings
+from pydantic.v1 import SecretStr
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class AzureOpenAIEmbeddingsComponent(CustomComponent):
@@ -52,12 +53,16 @@ class AzureOpenAIEmbeddingsComponent(CustomComponent):
api_version: str,
api_key: str,
) -> Embeddings:
+ if api_key:
+ azure_api_key = SecretStr(api_key)
+ else:
+ azure_api_key = None
try:
embeddings = AzureOpenAIEmbeddings(
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
api_version=api_version,
- api_key=api_key,
+ api_key=azure_api_key,
)
except Exception as e:
diff --git a/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py
index 849cd9bba..720dfa97f 100644
--- a/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py
@@ -2,7 +2,7 @@ from typing import Dict, Optional
from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class HuggingFaceEmbeddingsComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py
index 37a08ccc6..503a6a25a 100644
--- a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py
@@ -3,7 +3,7 @@ from typing import Dict, Optional
from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings
from pydantic.v1.types import SecretStr
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class HuggingFaceInferenceAPIEmbeddingsComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py
index 23d80132f..d24c9fb30 100644
--- a/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py
@@ -1,7 +1,7 @@
+from langchain_mistralai.embeddings import MistralAIEmbeddings
from pydantic.v1 import SecretStr
-from langchain_mistralai.embeddings import MistralAIEmbeddings
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings
diff --git a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py
index 63ddc6fd4..8aad24735 100644
--- a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py
@@ -1,9 +1,9 @@
from typing import Optional
-from langchain.embeddings.base import Embeddings
from langchain_community.embeddings import OllamaEmbeddings
+from langchain_core.embeddings import Embeddings
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class OllamaEmbeddingsComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
index 0b8959101..2ff78d562 100644
--- a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py
@@ -1,10 +1,10 @@
-from typing import Any, Dict, List, Optional
+from typing import Dict, List, Optional
from langchain_openai.embeddings.base import OpenAIEmbeddings
from pydantic.v1 import SecretStr
+from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings, NestedDict
-from langflow.interface.custom.custom_component import CustomComponent
class OpenAIEmbeddingsComponent(CustomComponent):
@@ -94,7 +94,6 @@ class OpenAIEmbeddingsComponent(CustomComponent):
allowed_special: List[str] = [],
disallowed_special: List[str] = ["all"],
chunk_size: int = 1000,
- client: Optional[Any] = None,
deployment: str = "text-embedding-ada-002",
embedding_ctx_length: int = 8191,
max_retries: int = 6,
@@ -126,7 +125,6 @@ class OpenAIEmbeddingsComponent(CustomComponent):
allowed_special=set(allowed_special),
disallowed_special="all",
chunk_size=chunk_size,
- client=client,
deployment=deployment,
embedding_ctx_length=embedding_ctx_length,
max_retries=max_retries,
diff --git a/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py
index 4bfaa0003..c0d249326 100644
--- a/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py
@@ -2,7 +2,7 @@ from typing import List, Optional
from langchain_google_vertexai import VertexAIEmbeddings
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class VertexAIEmbeddingsComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/experimental/ClearMessageHistory.py b/src/backend/base/langflow/components/experimental/ClearMessageHistory.py
index 3c62b7ea3..dacfaccb4 100644
--- a/src/backend/base/langflow/components/experimental/ClearMessageHistory.py
+++ b/src/backend/base/langflow/components/experimental/ClearMessageHistory.py
@@ -1,4 +1,4 @@
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.memory import delete_messages, get_messages
diff --git a/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py b/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py
index 5d816f112..b1d6ecd40 100644
--- a/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py
+++ b/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py
@@ -1,4 +1,4 @@
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/experimental/ListFlows.py b/src/backend/base/langflow/components/experimental/ListFlows.py
index c7b421d15..07b4a4bbc 100644
--- a/src/backend/base/langflow/components/experimental/ListFlows.py
+++ b/src/backend/base/langflow/components/experimental/ListFlows.py
@@ -1,6 +1,6 @@
from typing import List
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/experimental/Listen.py b/src/backend/base/langflow/components/experimental/Listen.py
index cab979f70..be7ddb8e3 100644
--- a/src/backend/base/langflow/components/experimental/Listen.py
+++ b/src/backend/base/langflow/components/experimental/Listen.py
@@ -1,4 +1,4 @@
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/experimental/MergeRecords.py b/src/backend/base/langflow/components/experimental/MergeRecords.py
index 60e5ffe20..c938b4473 100644
--- a/src/backend/base/langflow/components/experimental/MergeRecords.py
+++ b/src/backend/base/langflow/components/experimental/MergeRecords.py
@@ -1,4 +1,4 @@
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/experimental/Notify.py b/src/backend/base/langflow/components/experimental/Notify.py
index 9af7f8ec6..bf4391682 100644
--- a/src/backend/base/langflow/components/experimental/Notify.py
+++ b/src/backend/base/langflow/components/experimental/Notify.py
@@ -1,6 +1,6 @@
from typing import Optional
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/experimental/Pass.py b/src/backend/base/langflow/components/experimental/Pass.py
index 9528f0591..3fdb438a0 100644
--- a/src/backend/base/langflow/components/experimental/Pass.py
+++ b/src/backend/base/langflow/components/experimental/Pass.py
@@ -1,7 +1,8 @@
from typing import Union
-from langflow.interface.custom.custom_component import CustomComponent
-from langflow.schema import Record
+
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
+from langflow.schema import Record
class PassComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/experimental/PythonFunction.py b/src/backend/base/langflow/components/experimental/PythonFunction.py
index 28e902abe..d832e2f5c 100644
--- a/src/backend/base/langflow/components/experimental/PythonFunction.py
+++ b/src/backend/base/langflow/components/experimental/PythonFunction.py
@@ -1,8 +1,8 @@
from typing import Callable
+from langflow.custom import CustomComponent
+from langflow.custom.utils import get_function
from langflow.field_typing import Code
-from langflow.interface.custom.custom_component import CustomComponent
-from langflow.interface.custom.utils import get_function
class PythonFunctionComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/experimental/RunnableExecutor.py b/src/backend/base/langflow/components/experimental/RunnableExecutor.py
index 0a3593a66..82260b76b 100644
--- a/src/backend/base/langflow/components/experimental/RunnableExecutor.py
+++ b/src/backend/base/langflow/components/experimental/RunnableExecutor.py
@@ -1,7 +1,7 @@
from langchain_core.runnables import Runnable
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
-from langflow.interface.custom.custom_component import CustomComponent
class RunnableExecComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/experimental/SQLExecutor.py b/src/backend/base/langflow/components/experimental/SQLExecutor.py
index a62ac187b..e03314514 100644
--- a/src/backend/base/langflow/components/experimental/SQLExecutor.py
+++ b/src/backend/base/langflow/components/experimental/SQLExecutor.py
@@ -1,8 +1,8 @@
from langchain_community.tools.sql_database.tool import QuerySQLDataBaseTool
from langchain_community.utilities import SQLDatabase
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
-from langflow.interface.custom.custom_component import CustomComponent
class SQLExecutorComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/experimental/SplitText.py b/src/backend/base/langflow/components/experimental/SplitText.py
index bd2bc921f..5d71c8014 100644
--- a/src/backend/base/langflow/components/experimental/SplitText.py
+++ b/src/backend/base/langflow/components/experimental/SplitText.py
@@ -1,7 +1,7 @@
from typing import Optional
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record
from langflow.utils.util import unescape_string
diff --git a/src/backend/base/langflow/components/experimental/StoreMessage.py b/src/backend/base/langflow/components/experimental/StoreMessage.py
index 9f3aa60e2..761646188 100644
--- a/src/backend/base/langflow/components/experimental/StoreMessage.py
+++ b/src/backend/base/langflow/components/experimental/StoreMessage.py
@@ -1,6 +1,6 @@
from typing import List, Optional
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.memory import get_messages, store_message
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/experimental/TextOperator.py b/src/backend/base/langflow/components/experimental/TextOperator.py
index 89fba13b6..ea79e92e7 100644
--- a/src/backend/base/langflow/components/experimental/TextOperator.py
+++ b/src/backend/base/langflow/components/experimental/TextOperator.py
@@ -1,8 +1,8 @@
from typing import Optional, Union
-from langflow.interface.custom.custom_component import CustomComponent
-from langflow.schema import Record
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
+from langflow.schema import Record
class TextOperatorComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/helpers/CombineText.py b/src/backend/base/langflow/components/helpers/CombineText.py
index fcd23c188..bedc4293d 100644
--- a/src/backend/base/langflow/components/helpers/CombineText.py
+++ b/src/backend/base/langflow/components/helpers/CombineText.py
@@ -1,4 +1,4 @@
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
diff --git a/src/backend/base/langflow/components/helpers/CombineTextsUnsorted.py b/src/backend/base/langflow/components/helpers/CombineTextsUnsorted.py
index 20cd4db29..67d315739 100644
--- a/src/backend/base/langflow/components/helpers/CombineTextsUnsorted.py
+++ b/src/backend/base/langflow/components/helpers/CombineTextsUnsorted.py
@@ -1,4 +1,4 @@
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
diff --git a/src/backend/base/langflow/components/helpers/CustomComponent.py b/src/backend/base/langflow/components/helpers/CustomComponent.py
index ce3d8c62c..7313323a9 100644
--- a/src/backend/base/langflow/components/helpers/CustomComponent.py
+++ b/src/backend/base/langflow/components/helpers/CustomComponent.py
@@ -1,6 +1,6 @@
# from langflow.field_typing import Data
+from langflow.custom import CustomComponent
from langflow.schema import Record
-from langflow.interface.custom.custom_component import CustomComponent
class Component(CustomComponent):
diff --git a/src/backend/base/langflow/components/helpers/DocumentToRecord.py b/src/backend/base/langflow/components/helpers/DocumentToRecord.py
index 362c0a9c1..5adaf7ab4 100644
--- a/src/backend/base/langflow/components/helpers/DocumentToRecord.py
+++ b/src/backend/base/langflow/components/helpers/DocumentToRecord.py
@@ -2,7 +2,7 @@ from typing import List
from langchain_core.documents import Document
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/helpers/IDGenerator.py b/src/backend/base/langflow/components/helpers/IDGenerator.py
index d10574eef..1e4e223b1 100644
--- a/src/backend/base/langflow/components/helpers/IDGenerator.py
+++ b/src/backend/base/langflow/components/helpers/IDGenerator.py
@@ -1,7 +1,7 @@
import uuid
from typing import Any, Optional
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class UUIDGeneratorComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/helpers/MessageHistory.py b/src/backend/base/langflow/components/helpers/MessageHistory.py
index 0f208e6eb..221d90c4e 100644
--- a/src/backend/base/langflow/components/helpers/MessageHistory.py
+++ b/src/backend/base/langflow/components/helpers/MessageHistory.py
@@ -1,6 +1,6 @@
from typing import List, Optional
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.memory import get_messages
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/helpers/RecordsToText.py b/src/backend/base/langflow/components/helpers/RecordsToText.py
index 8f4fed311..d3e418792 100644
--- a/src/backend/base/langflow/components/helpers/RecordsToText.py
+++ b/src/backend/base/langflow/components/helpers/RecordsToText.py
@@ -1,6 +1,6 @@
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
from langflow.helpers.record import records_to_text
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/helpers/UpdateRecord.py b/src/backend/base/langflow/components/helpers/UpdateRecord.py
index 9f165e146..e3153d6d7 100644
--- a/src/backend/base/langflow/components/helpers/UpdateRecord.py
+++ b/src/backend/base/langflow/components/helpers/UpdateRecord.py
@@ -1,4 +1,4 @@
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/inputs/Prompt.py b/src/backend/base/langflow/components/inputs/Prompt.py
index 3b44a6d12..2c76e6132 100644
--- a/src/backend/base/langflow/components/inputs/Prompt.py
+++ b/src/backend/base/langflow/components/inputs/Prompt.py
@@ -1,7 +1,7 @@
from langchain_core.prompts import PromptTemplate
+from langflow.custom import CustomComponent
from langflow.field_typing import Prompt, TemplateField, Text
-from langflow.interface.custom.custom_component import CustomComponent
class PromptComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py
index 23eebee10..848d10985 100644
--- a/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py
+++ b/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py
@@ -3,7 +3,7 @@
# We need to make sure this class is importable from the context where this code will be running.
from langchain_community.utilities.bing_search import BingSearchAPIWrapper
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class BingSearchAPIWrapperComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py
index 3f2f67faf..5e45219cc 100644
--- a/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py
+++ b/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py
@@ -2,7 +2,7 @@ from typing import Callable, Union
from langchain_community.utilities.google_search import GoogleSearchAPIWrapper
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class GoogleSearchAPIWrapperComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py
index e70e85cf4..2b9a49458 100644
--- a/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py
+++ b/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py
@@ -4,7 +4,7 @@ from typing import Dict
# If this class does not exist, you would need to create it or import the appropriate class from another module
from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class GoogleSerperAPIWrapperComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py
index df783470a..c0300cff0 100644
--- a/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py
+++ b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py
@@ -13,7 +13,7 @@
from langchain_core.documents import Document
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.services.database.models.base import orjson_dumps
diff --git a/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py b/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py
index 7bbbdb870..93c46087d 100644
--- a/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py
+++ b/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py
@@ -1,6 +1,6 @@
from langchain_experimental.sql.base import SQLDatabase
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class SQLDatabaseComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py b/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py
index d255f34b7..4fe0706f8 100644
--- a/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py
+++ b/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py
@@ -2,7 +2,7 @@ from typing import Dict, Optional
from langchain_community.utilities.searx_search import SearxSearchWrapper
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class SearxSearchWrapperComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py
index c64a26e79..d8aa404cb 100644
--- a/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py
+++ b/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py
@@ -2,7 +2,7 @@ from typing import Callable, Union
from langchain_community.utilities.serpapi import SerpAPIWrapper
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class SerpAPIWrapperComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py
index 144792315..1c10dd4bd 100644
--- a/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py
+++ b/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py
@@ -2,7 +2,7 @@ from typing import Callable, Union
from langchain_community.utilities.wikipedia import WikipediaAPIWrapper
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
# Assuming WikipediaAPIWrapper is a class that needs to be imported.
# The import statement is not included as it is not provided in the JSON
diff --git a/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py
index bc224e83c..42be1f199 100644
--- a/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py
+++ b/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py
@@ -2,7 +2,7 @@ from typing import Callable, Union
from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
# Since all the fields in the JSON have show=False, we will only create a basic component
# without any configurable fields.
diff --git a/src/backend/base/langflow/components/memories/AstraDBMessageReader.py b/src/backend/base/langflow/components/memories/AstraDBMessageReader.py
index 9b82dd308..bbb732f16 100644
--- a/src/backend/base/langflow/components/memories/AstraDBMessageReader.py
+++ b/src/backend/base/langflow/components/memories/AstraDBMessageReader.py
@@ -51,9 +51,7 @@ class AstraDBMessageReaderComponent(BaseMemoryComponent):
Returns:
list[Record]: A list of Record objects representing the search results.
"""
- memory: AstraDBChatMessageHistory = cast(
- AstraDBChatMessageHistory, kwargs.get("memory")
- )
+ memory: AstraDBChatMessageHistory = cast(AstraDBChatMessageHistory, kwargs.get("memory"))
if not memory:
raise ValueError("AstraDBChatMessageHistory instance is required.")
@@ -72,9 +70,7 @@ class AstraDBMessageReaderComponent(BaseMemoryComponent):
namespace: Optional[str] = None,
) -> list[Record]:
try:
- from langchain_community.chat_message_histories.astradb import (
- AstraDBChatMessageHistory,
- )
+ pass
except ImportError:
raise ImportError(
"Could not import langchain Astra DB integration package. "
diff --git a/src/backend/base/langflow/components/memories/AstraDBMessageWriter.py b/src/backend/base/langflow/components/memories/AstraDBMessageWriter.py
index 33525656e..265f60cf4 100644
--- a/src/backend/base/langflow/components/memories/AstraDBMessageWriter.py
+++ b/src/backend/base/langflow/components/memories/AstraDBMessageWriter.py
@@ -5,7 +5,7 @@ from langflow.field_typing import Text
from langflow.schema.schema import Record
from langchain_core.messages import BaseMessage
-from langchain_community.chat_message_histories.astradb import AstraDBChatMessageHistory
+from langchain_astradb import AstraDBChatMessageHistory
class AstraDBMessageWriterComponent(BaseMemoryComponent):
@@ -74,13 +74,15 @@ class AstraDBMessageWriterComponent(BaseMemoryComponent):
if memory is None:
raise ValueError("AstraDBChatMessageHistory instance is required.")
- text_list = [BaseMessage(
- content=text,
- sender=sender,
- sender_name=sender_name,
- metadata=metadata,
- session_id=session_id,
- )]
+ text_list = [
+ BaseMessage(
+ content=text,
+ sender=sender,
+ sender_name=sender_name,
+ metadata=metadata,
+ session_id=session_id,
+ )
+ ]
memory.add_messages(text_list)
@@ -94,9 +96,7 @@ class AstraDBMessageWriterComponent(BaseMemoryComponent):
namespace: Optional[str] = None,
) -> Record:
try:
- from langchain_community.chat_message_histories.astradb import (
- AstraDBChatMessageHistory,
- )
+ pass
except ImportError:
raise ImportError(
"Could not import langchain Astra DB integration package. "
diff --git a/src/backend/base/langflow/components/memories/ZepMessageReader.py b/src/backend/base/langflow/components/memories/ZepMessageReader.py
index bac6e9f1a..75b27091f 100644
--- a/src/backend/base/langflow/components/memories/ZepMessageReader.py
+++ b/src/backend/base/langflow/components/memories/ZepMessageReader.py
@@ -116,19 +116,18 @@ class ZepMessageReaderComponent(BaseMemoryComponent):
url: Optional[Text] = None,
api_key: Optional[Text] = None,
query: Optional[Text] = None,
- search_scope: SearchScope = SearchScope.messages,
- search_type: SearchType = SearchType.similarity,
+ search_scope: str = SearchScope.messages,
+ search_type: str = SearchType.similarity,
limit: Optional[int] = None,
) -> list[Record]:
try:
- from zep_python import ZepClient
- from zep_python.langchain import ZepChatMessageHistory
-
# Monkeypatch API_BASE_PATH to
# avoid 404
# This is a workaround for the local Zep instance
# cloud Zep works with v2
import zep_python.zep_client
+ from zep_python import ZepClient
+ from zep_python.langchain import ZepChatMessageHistory
zep_python.zep_client.API_BASE_PATH = api_base_path
except ImportError:
diff --git a/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py
index ff36820f5..0e27e620f 100644
--- a/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py
@@ -1,8 +1,9 @@
from typing import Optional
-from langflow.field_typing import BaseLanguageModel
+
from langchain_community.llms.bedrock import Bedrock
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
+from langflow.field_typing import BaseLanguageModel
class AmazonBedrockComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py
index 016eaeb2d..786d558bb 100644
--- a/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py
@@ -1,10 +1,10 @@
from typing import Optional
-from langchain.llms.base import BaseLanguageModel
from langchain_anthropic import ChatAnthropic
+from langchain_core.language_models import BaseLanguageModel
from pydantic.v1 import SecretStr
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class ChatAntropicSpecsComponent(CustomComponent):
@@ -35,8 +35,8 @@ class ChatAntropicSpecsComponent(CustomComponent):
},
"max_tokens": {
"display_name": "Max Tokens",
- "field_type": "int",
- "value": 256,
+ "advanced": True,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"temperature": {
"display_name": "Temperature",
diff --git a/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py b/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py
index 6f468bbed..947a1e2a3 100644
--- a/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py
+++ b/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py
@@ -1,9 +1,10 @@
from typing import Optional
-from langchain.llms.base import BaseLanguageModel
-from langchain_community.chat_models.azure_openai import AzureChatOpenAI
+from langchain_core.language_models import BaseLanguageModel
+from langchain_openai import AzureChatOpenAI
+from pydantic.v1 import SecretStr
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class AzureChatOpenAISpecsComponent(CustomComponent):
@@ -65,11 +66,8 @@ class AzureChatOpenAISpecsComponent(CustomComponent):
},
"max_tokens": {
"display_name": "Max Tokens",
- "value": 1000,
- "required": False,
- "field_type": "int",
"advanced": True,
- "info": "Maximum number of tokens to generate.",
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"code": {"show": False},
}
@@ -84,15 +82,19 @@ class AzureChatOpenAISpecsComponent(CustomComponent):
temperature: float = 0.7,
max_tokens: Optional[int] = 1000,
) -> BaseLanguageModel:
+ if api_key:
+ azure_api_key = SecretStr(api_key)
+ else:
+ azure_api_key = None
try:
llm = AzureChatOpenAI(
model=model,
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
api_version=api_version,
- api_key=api_key,
+ api_key=azure_api_key,
temperature=temperature,
- max_tokens=max_tokens,
+ max_tokens=max_tokens or None,
)
except Exception as e:
raise ValueError("Could not connect to AzureOpenAI API.") from e
diff --git a/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py
index a60fb9a64..a353410ad 100644
--- a/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py
@@ -1,10 +1,11 @@
from typing import Optional
from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
+
from pydantic.v1 import SecretStr
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
-from langflow.interface.custom.custom_component import CustomComponent
class QianfanChatEndpointComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py
index cc4ba3b9f..273bb5d98 100644
--- a/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py
@@ -2,7 +2,7 @@ from typing import Optional
from langchain_community.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
diff --git a/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py
index b03a9b737..7e4000d9b 100644
--- a/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py
@@ -46,9 +46,8 @@ class AnthropicLLM(CustomComponent):
},
"max_tokens": {
"display_name": "Max Tokens",
- "field_type": "int",
"advanced": True,
- "value": 256,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"temperature": {
"display_name": "Temperature",
diff --git a/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py
index 840682f4d..b3bce849e 100644
--- a/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py
@@ -1,8 +1,9 @@
from typing import Any, Dict, Optional
from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException
+
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
-from langflow.interface.custom.custom_component import CustomComponent
class ChatLiteLLMComponent(CustomComponent):
@@ -81,12 +82,9 @@ class ChatLiteLLMComponent(CustomComponent):
"default": 1,
},
"max_tokens": {
- "display_name": "Max tokens",
- "field_type": "int",
- "advanced": False,
- "required": False,
- "default": 256,
- "info": "The maximum number of tokens to generate for each chat completion.",
+ "display_name": "Max Tokens",
+ "advanced": True,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"max_retries": {
"display_name": "Max retries",
diff --git a/src/backend/base/langflow/components/model_specs/ChatMistralSpecs.py b/src/backend/base/langflow/components/model_specs/ChatMistralSpecs.py
index 90f94aacd..73bbc3220 100644
--- a/src/backend/base/langflow/components/model_specs/ChatMistralSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatMistralSpecs.py
@@ -77,7 +77,7 @@ class MistralAIModelComponent(CustomComponent):
output = ChatMistralAI(
model_name=model,
api_key=(SecretStr(mistral_api_key) if mistral_api_key else None),
- max_tokens=max_tokens,
+ max_tokens=max_tokens or None,
temperature=temperature,
endpoint=mistral_api_base,
)
diff --git a/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py b/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py
index 6afde420c..610f4d110 100644
--- a/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py
@@ -1,11 +1,11 @@
-from typing import Any, Dict, List, Optional
+from typing import Dict, List, Optional
# from langchain_community.chat_models import ChatOllama
from langchain_community.chat_models import ChatOllama
from langchain_core.language_models.chat_models import BaseChatModel
# from langchain.chat_models import ChatOllama
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
# from langchain.callbacks.manager import CallbackManager
@@ -182,7 +182,7 @@ class ChatOllamaComponent(CustomComponent):
num_ctx: Optional[int] = None,
num_gpu: Optional[int] = None,
format: Optional[str] = None,
- metadata: Optional[Dict[str, Any]] = None,
+ metadata: Optional[Dict] = None,
num_thread: Optional[int] = None,
repeat_penalty: Optional[float] = None,
stop: Optional[List[str]] = None,
diff --git a/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py
index 75f893582..ff26d5923 100644
--- a/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py
@@ -3,10 +3,9 @@ from typing import Optional
from langchain_openai import ChatOpenAI
from pydantic.v1 import SecretStr
-
from langflow.base.models.openai_constants import MODEL_NAMES
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, NestedDict
-from langflow.interface.custom.custom_component import CustomComponent
class ChatOpenAIComponent(CustomComponent):
@@ -18,8 +17,8 @@ class ChatOpenAIComponent(CustomComponent):
return {
"max_tokens": {
"display_name": "Max Tokens",
- "advanced": False,
- "required": False,
+ "advanced": True,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"model_kwargs": {
"display_name": "Model Kwargs",
@@ -52,7 +51,7 @@ class ChatOpenAIComponent(CustomComponent):
def build(
self,
- max_tokens: Optional[int] = 256,
+ max_tokens: Optional[int] = 0,
model_kwargs: NestedDict = {},
model_name: str = "gpt-4o",
openai_api_base: Optional[str] = None,
@@ -66,7 +65,7 @@ class ChatOpenAIComponent(CustomComponent):
else:
api_key = None
return ChatOpenAI(
- max_tokens=max_tokens,
+ max_tokens=max_tokens or None,
model_kwargs=model_kwargs,
model=model_name,
base_url=openai_api_base,
diff --git a/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py
index f2c377546..0df7a0465 100644
--- a/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py
+++ b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py
@@ -1,10 +1,9 @@
-from typing import List, Optional
+from typing import Optional
from langchain_community.chat_models.vertexai import ChatVertexAI
-from langchain_core.messages.base import BaseMessage
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
-from langflow.interface.custom.custom_component import CustomComponent
class ChatVertexAIComponent(CustomComponent):
@@ -65,7 +64,6 @@ class ChatVertexAIComponent(CustomComponent):
self,
credentials: Optional[str],
project: str,
- examples: Optional[List[BaseMessage]] = [],
location: str = "us-central1",
max_output_tokens: int = 128,
model_name: str = "chat-bison",
@@ -76,7 +74,6 @@ class ChatVertexAIComponent(CustomComponent):
) -> BaseLanguageModel:
return ChatVertexAI(
credentials=credentials,
- examples=examples,
location=location,
max_output_tokens=max_output_tokens,
model_name=model_name,
diff --git a/src/backend/base/langflow/components/model_specs/CohereSpecs.py b/src/backend/base/langflow/components/model_specs/CohereSpecs.py
index eeda381a4..2e2a1fa7e 100644
--- a/src/backend/base/langflow/components/model_specs/CohereSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/CohereSpecs.py
@@ -1,7 +1,10 @@
-from langchain_community.llms.cohere import Cohere
-from langchain_core.language_models.base import BaseLanguageModel
+from typing import Optional
-from langflow.interface.custom.custom_component import CustomComponent
+from langchain_cohere import ChatCohere
+from langchain_core.language_models.base import BaseLanguageModel
+from pydantic.v1 import SecretStr
+
+from langflow.custom import CustomComponent
class CohereComponent(CustomComponent):
@@ -13,14 +16,22 @@ class CohereComponent(CustomComponent):
def build_config(self):
return {
"cohere_api_key": {"display_name": "Cohere API Key", "type": "password", "password": True},
- "max_tokens": {"display_name": "Max Tokens", "default": 256, "type": "int", "show": True},
+ "max_tokens": {
+ "display_name": "Max Tokens",
+ "advanced": True,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ },
"temperature": {"display_name": "Temperature", "default": 0.75, "type": "float", "show": True},
}
def build(
self,
cohere_api_key: str,
- max_tokens: int = 256,
+ max_tokens: Optional[int] = 256,
temperature: float = 0.75,
) -> BaseLanguageModel:
- return Cohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) # type: ignore
+ if cohere_api_key:
+ api_key = SecretStr(cohere_api_key)
+ else:
+ api_key = None
+ return ChatCohere(cohere_api_key=api_key, max_tokens=max_tokens or None, temperature=temperature) # type: ignore
diff --git a/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py b/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py
index 8a3894292..534085938 100644
--- a/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py
+++ b/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py
@@ -3,8 +3,8 @@ from typing import Optional
from langchain_google_genai import ChatGoogleGenerativeAI # type: ignore
from pydantic.v1.types import SecretStr
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, RangeSpec
-from langflow.interface.custom.custom_component import CustomComponent
class GoogleGenerativeAIComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
index c145105ea..4de68365f 100644
--- a/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py
@@ -1,9 +1,9 @@
from typing import Optional
-from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
+from langflow.field_typing import BaseLanguageModel
class HuggingFaceEndpointsComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py
index 4ba5502d3..1c416f1b1 100644
--- a/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py
+++ b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py
@@ -1,9 +1,9 @@
from typing import List, Optional
-from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.ollama import Ollama
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
+from langflow.field_typing import BaseLanguageModel
class OllamaLLM(CustomComponent):
diff --git a/src/backend/base/langflow/components/model_specs/VertexAISpecs.py b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py
index c9664408d..49b120d35 100644
--- a/src/backend/base/langflow/components/model_specs/VertexAISpecs.py
+++ b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py
@@ -1,9 +1,9 @@
from typing import Dict, Optional
-from langflow.field_typing import BaseLanguageModel
from langchain_community.llms.vertexai import VertexAI
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
+from langflow.field_typing import BaseLanguageModel
class VertexAIComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/models/AnthropicModel.py b/src/backend/base/langflow/components/models/AnthropicModel.py
index 1e5a7aec5..cfe9ed900 100644
--- a/src/backend/base/langflow/components/models/AnthropicModel.py
+++ b/src/backend/base/langflow/components/models/AnthropicModel.py
@@ -49,9 +49,8 @@ class AnthropicLLM(LCModelComponent):
},
"max_tokens": {
"display_name": "Max Tokens",
- "field_type": "int",
"advanced": True,
- "value": 256,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"temperature": {
"display_name": "Temperature",
diff --git a/src/backend/base/langflow/components/models/AzureOpenAIModel.py b/src/backend/base/langflow/components/models/AzureOpenAIModel.py
index a2ee20b28..c296a8fae 100644
--- a/src/backend/base/langflow/components/models/AzureOpenAIModel.py
+++ b/src/backend/base/langflow/components/models/AzureOpenAIModel.py
@@ -74,9 +74,8 @@ class AzureChatOpenAIComponent(LCModelComponent):
},
"max_tokens": {
"display_name": "Max Tokens",
- "value": 1000,
"advanced": True,
- "info": "Maximum number of tokens to generate.",
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"code": {"show": False},
"input_value": {"display_name": "Input"},
@@ -117,7 +116,7 @@ class AzureChatOpenAIComponent(LCModelComponent):
api_version=api_version,
api_key=secret_api_key,
temperature=temperature,
- max_tokens=max_tokens,
+ max_tokens=max_tokens or None,
)
except Exception as e:
raise ValueError("Could not connect to AzureOpenAI API.") from e
diff --git a/src/backend/base/langflow/components/models/ChatLiteLLMModel.py b/src/backend/base/langflow/components/models/ChatLiteLLMModel.py
index 95574f0a5..054b59d12 100644
--- a/src/backend/base/langflow/components/models/ChatLiteLLMModel.py
+++ b/src/backend/base/langflow/components/models/ChatLiteLLMModel.py
@@ -93,9 +93,7 @@ class ChatLiteLLMModelComponent(LCModelComponent):
},
"max_tokens": {
"display_name": "Max tokens",
- "field_type": "int",
"advanced": False,
- "required": False,
"default": 256,
"info": "The maximum number of tokens to generate for each chat completion.",
},
diff --git a/src/backend/base/langflow/components/models/CohereModel.py b/src/backend/base/langflow/components/models/CohereModel.py
index 665aacc13..3bd12c095 100644
--- a/src/backend/base/langflow/components/models/CohereModel.py
+++ b/src/backend/base/langflow/components/models/CohereModel.py
@@ -1,10 +1,10 @@
from typing import Optional
-from langchain_community.chat_models.cohere import ChatCohere
from pydantic.v1 import SecretStr
from langflow.field_typing import Text
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
+from langchain_cohere import ChatCohere
class CohereComponent(LCModelComponent):
@@ -34,9 +34,7 @@ class CohereComponent(LCModelComponent):
"max_tokens": {
"display_name": "Max Tokens",
"advanced": True,
- "default": 256,
- "type": "int",
- "show": True,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"temperature": {
"display_name": "Temperature",
diff --git a/src/backend/base/langflow/components/models/MistralModel.py b/src/backend/base/langflow/components/models/MistralModel.py
index b8834b314..305a45e4b 100644
--- a/src/backend/base/langflow/components/models/MistralModel.py
+++ b/src/backend/base/langflow/components/models/MistralModel.py
@@ -31,6 +31,7 @@ class MistralAIModelComponent(LCModelComponent):
"max_tokens": {
"display_name": "Max Tokens",
"advanced": True,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"model_name": {
"display_name": "Model Name",
@@ -125,7 +126,7 @@ class MistralAIModelComponent(LCModelComponent):
api_key = None
chat_model = ChatMistralAI(
- max_tokens=max_tokens,
+ max_tokens=max_tokens or None,
model_name=model_name,
endpoint=mistral_api_base,
api_key=api_key,
diff --git a/src/backend/base/langflow/components/models/OllamaModel.py b/src/backend/base/langflow/components/models/OllamaModel.py
index 41d75ba3e..d9ba48501 100644
--- a/src/backend/base/langflow/components/models/OllamaModel.py
+++ b/src/backend/base/langflow/components/models/OllamaModel.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional
+from typing import Dict, List, Optional
# from langchain_community.chat_models import ChatOllama
from langchain_community.chat_models import ChatOllama
@@ -229,7 +229,7 @@ class ChatOllamaComponent(LCModelComponent):
num_ctx: Optional[int] = None,
num_gpu: Optional[int] = None,
format: Optional[str] = None,
- metadata: Optional[Dict[str, Any]] = None,
+ metadata: Optional[Dict] = None,
num_thread: Optional[int] = None,
repeat_penalty: Optional[float] = None,
stop: Optional[List[str]] = None,
diff --git a/src/backend/base/langflow/components/models/OpenAIModel.py b/src/backend/base/langflow/components/models/OpenAIModel.py
index 149c93ad3..7adaf7a92 100644
--- a/src/backend/base/langflow/components/models/OpenAIModel.py
+++ b/src/backend/base/langflow/components/models/OpenAIModel.py
@@ -32,6 +32,7 @@ class OpenAIModelComponent(LCModelComponent):
"max_tokens": {
"display_name": "Max Tokens",
"advanced": True,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
},
"model_kwargs": {
"display_name": "Model Kwargs",
@@ -93,7 +94,7 @@ class OpenAIModelComponent(LCModelComponent):
api_key = None
output = ChatOpenAI(
- max_tokens=max_tokens,
+ max_tokens=max_tokens or None,
model_kwargs=model_kwargs,
model=model_name,
base_url=openai_api_base,
diff --git a/src/backend/base/langflow/components/models/VertexAiModel.py b/src/backend/base/langflow/components/models/VertexAiModel.py
index ff520e0d1..a992447f4 100644
--- a/src/backend/base/langflow/components/models/VertexAiModel.py
+++ b/src/backend/base/langflow/components/models/VertexAiModel.py
@@ -1,6 +1,5 @@
-from typing import List, Optional
+from typing import Optional
-from langchain_core.messages.base import BaseMessage
from langflow.base.constants import STREAM_INFO_TEXT
from langflow.base.models.model import LCModelComponent
@@ -93,7 +92,6 @@ class ChatVertexAIComponent(LCModelComponent):
input_value: Text,
credentials: Optional[str],
project: str,
- examples: Optional[List[BaseMessage]] = [],
location: str = "us-central1",
max_output_tokens: int = 128,
model_name: str = "chat-bison",
@@ -112,7 +110,6 @@ class ChatVertexAIComponent(LCModelComponent):
)
output = ChatVertexAI(
credentials=credentials,
- examples=examples,
location=location,
max_output_tokens=max_output_tokens,
model_name=model_name,
diff --git a/src/backend/base/langflow/components/retrievers/AmazonKendra.py b/src/backend/base/langflow/components/retrievers/AmazonKendra.py
index 6584f6545..23ab9191a 100644
--- a/src/backend/base/langflow/components/retrievers/AmazonKendra.py
+++ b/src/backend/base/langflow/components/retrievers/AmazonKendra.py
@@ -1,9 +1,9 @@
from typing import Optional
-from langchain.schema import BaseRetriever
from langchain_community.retrievers import AmazonKendraRetriever
+from langchain_core.retrievers import BaseRetriever
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class AmazonKendraRetrieverComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/retrievers/MetalRetriever.py b/src/backend/base/langflow/components/retrievers/MetalRetriever.py
index c5c56a397..55fbcff0d 100644
--- a/src/backend/base/langflow/components/retrievers/MetalRetriever.py
+++ b/src/backend/base/langflow/components/retrievers/MetalRetriever.py
@@ -1,10 +1,10 @@
from typing import Optional
-from langchain.schema import BaseRetriever
from langchain_community.retrievers import MetalRetriever
+from langchain_core.retrievers import BaseRetriever
from metal_sdk.metal import Metal # type: ignore
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class MetalRetrieverComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py
index 8dd6d8579..d9197ece2 100644
--- a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py
+++ b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py
@@ -2,8 +2,8 @@ from typing import Optional
from langchain.retrievers import MultiQueryRetriever
-from langflow.field_typing import BaseRetriever, PromptTemplate, BaseLanguageModel
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
+from langflow.field_typing import BaseLanguageModel, BaseRetriever, PromptTemplate, Text
class MultiQueryRetrieverComponent(CustomComponent):
@@ -41,10 +41,13 @@ class MultiQueryRetrieverComponent(CustomComponent):
self,
llm: BaseLanguageModel,
retriever: BaseRetriever,
- prompt: Optional[PromptTemplate] = None,
+ prompt: Optional[Text] = None,
parser_key: str = "lines",
) -> MultiQueryRetriever:
if not prompt:
return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, parser_key=parser_key)
else:
- return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, prompt=prompt, parser_key=parser_key)
+ prompt_template = PromptTemplate.from_template(prompt)
+ return MultiQueryRetriever.from_llm(
+ llm=llm, retriever=retriever, prompt=prompt_template, parser_key=parser_key
+ )
diff --git a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py
index 759021487..0c5c4fff5 100644
--- a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py
+++ b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py
@@ -1,13 +1,13 @@
import json
from typing import List
-from langchain.base_language import BaseLanguageModel
from langchain.chains.query_constructor.base import AttributeInfo
from langchain.retrievers.self_query.base import SelfQueryRetriever
-from langchain.schema import BaseRetriever
-from langchain.schema.vectorstore import VectorStore
+from langchain_core.language_models import BaseLanguageModel
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class VectaraSelfQueryRetriverComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py b/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py
index 43f1aab71..6460e0458 100644
--- a/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py
+++ b/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py
@@ -1,7 +1,7 @@
from langchain_core.vectorstores import VectorStoreRetriever
+from langflow.custom import CustomComponent
from langflow.field_typing import VectorStore
-from langflow.interface.custom.custom_component import CustomComponent
class VectoStoreRetrieverComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py b/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py
index 2a0f3686f..ee340ab26 100644
--- a/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py
+++ b/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py
@@ -1,8 +1,8 @@
from typing import List
-from langchain.text_splitter import CharacterTextSplitter
+from langchain_text_splitters import CharacterTextSplitter
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema.schema import Record
from langflow.utils.util import unescape_string
diff --git a/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py b/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py
index 1a4ae24a1..7ef7d5c24 100644
--- a/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py
+++ b/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py
@@ -1,8 +1,8 @@
from typing import List, Optional
-from langchain.text_splitter import Language
+from langchain_text_splitters import Language, RecursiveCharacterTextSplitter
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema.schema import Record
@@ -61,7 +61,6 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent):
Returns:
list[str]: The chunks of text.
"""
- from langchain.text_splitter import RecursiveCharacterTextSplitter
# Make sure chunk_size and chunk_overlap are ints
if isinstance(chunk_size, str):
diff --git a/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py b/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py
index 1ceaa8bd6..77fcfa62a 100644
--- a/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py
+++ b/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py
@@ -1,9 +1,9 @@
from typing import Optional
-from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_core.documents import Document
+from langchain_text_splitters import RecursiveCharacterTextSplitter
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema import Record
from langflow.utils.util import build_loader_repr_from_records, unescape_string
diff --git a/src/backend/base/langflow/components/toolkits/JsonToolkit.py b/src/backend/base/langflow/components/toolkits/JsonToolkit.py
index 72fe17cde..09a613336 100644
--- a/src/backend/base/langflow/components/toolkits/JsonToolkit.py
+++ b/src/backend/base/langflow/components/toolkits/JsonToolkit.py
@@ -1,7 +1,10 @@
+from pathlib import Path
+
+import yaml
from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
from langchain_community.tools.json.tool import JsonSpec
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class JsonToolkitComponent(CustomComponent):
@@ -10,8 +13,17 @@ class JsonToolkitComponent(CustomComponent):
def build_config(self):
return {
- "spec": {"display_name": "Spec", "type": JsonSpec},
+ "path": {
+ "display_name": "Path",
+ "field_type": "file",
+ "file_types": ["json", "yaml", "yml"],
+ },
}
- def build(self, spec: JsonSpec) -> JsonToolkit:
+ def build(self, path: str) -> JsonToolkit:
+ if path.endswith("yaml") or path.endswith("yml"):
+ yaml_dict = yaml.load(open(path, "r"), Loader=yaml.FullLoader)
+ spec = JsonSpec(dict_=yaml_dict)
+ else:
+ spec = JsonSpec.from_file(Path(path))
return JsonToolkit(spec=spec)
diff --git a/src/backend/base/langflow/components/toolkits/Metaphor.py b/src/backend/base/langflow/components/toolkits/Metaphor.py
index 14962924f..9ebd4f771 100644
--- a/src/backend/base/langflow/components/toolkits/Metaphor.py
+++ b/src/backend/base/langflow/components/toolkits/Metaphor.py
@@ -1,11 +1,10 @@
from typing import List, Union
-from langchain.agents import tool
-from langchain.agents.agent_toolkits.base import BaseToolkit
-from langchain.tools import Tool
+from langchain_community.agent_toolkits.base import BaseToolkit
+from langchain_core.tools import Tool, tool
from metaphor_python import Metaphor # type: ignore
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class MetaphorToolkit(CustomComponent):
diff --git a/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py b/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py
index b29feb291..a24798cef 100644
--- a/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py
+++ b/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py
@@ -1,8 +1,12 @@
+from pathlib import Path
+
+import yaml
from langchain_community.agent_toolkits.openapi.toolkit import BaseToolkit, OpenAPIToolkit
+from langchain_community.tools.json.tool import JsonSpec
from langchain_community.utilities.requests import TextRequestsWrapper
-from langflow.field_typing import AgentExecutor
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
+from langflow.field_typing import BaseLanguageModel
class OpenAPIToolkitComponent(CustomComponent):
@@ -15,9 +19,16 @@ class OpenAPIToolkitComponent(CustomComponent):
"requests_wrapper": {"display_name": "Text Requests Wrapper"},
}
- def build(
- self,
- json_agent: AgentExecutor,
- requests_wrapper: TextRequestsWrapper,
- ) -> BaseToolkit:
- return OpenAPIToolkit(json_agent=json_agent, requests_wrapper=requests_wrapper)
+ def build(self, llm: BaseLanguageModel, path: str, allow_dangerous_requests: bool = False) -> BaseToolkit:
+ if path.endswith("yaml") or path.endswith("yml"):
+ yaml_dict = yaml.load(open(path, "r"), Loader=yaml.FullLoader)
+ spec = JsonSpec(dict_=yaml_dict)
+ else:
+ spec = JsonSpec.from_file(Path(path))
+ requests_wrapper = TextRequestsWrapper()
+ return OpenAPIToolkit.from_llm(
+ llm=llm,
+ json_spec=spec,
+ requests_wrapper=requests_wrapper,
+ allow_dangerous_requests=allow_dangerous_requests,
+ )
diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py
index 626a14fd8..60bd6598e 100644
--- a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py
+++ b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py
@@ -1,7 +1,7 @@
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo
-from langchain_community.vectorstores import VectorStore
+from langchain_core.vectorstores import VectorStore
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class VectorStoreInfoComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py b/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py
index 13ec1656f..13fff14a2 100644
--- a/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py
+++ b/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py
@@ -2,8 +2,8 @@ from typing import List, Union
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo, VectorStoreRouterToolkit
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, Tool
-from langflow.interface.custom.custom_component import CustomComponent
class VectorStoreRouterToolkitComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py b/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py
index 8436ba58e..2f788fcb9 100644
--- a/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py
+++ b/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py
@@ -2,8 +2,8 @@ from typing import Union
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo, VectorStoreToolkit
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel, Tool
-from langflow.interface.custom.custom_component import CustomComponent
class VectorStoreToolkitComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/tools/PythonREPLTool.py b/src/backend/base/langflow/components/tools/PythonREPLTool.py
index 6cc7d8649..f2f3b4b52 100644
--- a/src/backend/base/langflow/components/tools/PythonREPLTool.py
+++ b/src/backend/base/langflow/components/tools/PythonREPLTool.py
@@ -1,10 +1,9 @@
import importlib
-
-from langchain.agents import Tool
from langchain_experimental.utilities import PythonREPL
from langflow.base.tools.base import build_status_from_tool
from langflow.custom import CustomComponent
+from langchain_core.tools import Tool
class PythonREPLToolComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/tools/RetrieverTool.py b/src/backend/base/langflow/components/tools/RetrieverTool.py
index 914ba3941..28829321e 100644
--- a/src/backend/base/langflow/components/tools/RetrieverTool.py
+++ b/src/backend/base/langflow/components/tools/RetrieverTool.py
@@ -1,7 +1,7 @@
from langchain.tools.retriever import create_retriever_tool
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseRetriever, Tool
-from langflow.interface.custom.custom_component import CustomComponent
class RetrieverToolComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py b/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py
index 3e4b7880d..0046063d1 100644
--- a/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py
+++ b/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py
@@ -1,7 +1,7 @@
from typing import List, Optional
-import chromadb # type: ignore
-from langchain_community.vectorstores.chroma import Chroma
+from chromadb.config import Settings
+from langchain_chroma import Chroma
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.field_typing import Embeddings, Text
@@ -39,7 +39,7 @@ class ChromaSearchComponent(LCVectorStoreComponent):
"advanced": True,
},
"chroma_server_host": {"display_name": "Server Host", "advanced": True},
- "chroma_server_port": {"display_name": "Server Port", "advanced": True},
+ "chroma_server_http_port": {"display_name": "Server HTTP Port", "advanced": True},
"chroma_server_grpc_port": {
"display_name": "Server gRPC Port",
"advanced": True,
@@ -64,38 +64,39 @@ class ChromaSearchComponent(LCVectorStoreComponent):
chroma_server_ssl_enabled: bool,
number_of_results: int = 4,
index_directory: Optional[str] = None,
- chroma_server_cors_allow_origins: Optional[str] = None,
+ chroma_server_cors_allow_origins: List[str] = [],
chroma_server_host: Optional[str] = None,
- chroma_server_port: Optional[int] = None,
+ chroma_server_http_port: Optional[int] = None,
chroma_server_grpc_port: Optional[int] = None,
) -> List[Record]:
"""
Builds the Vector Store or BaseRetriever object.
Args:
+ - input_value (Text): The input value.
+ - search_type (str): The type of search.
- collection_name (str): The name of the collection.
- - persist_directory (Optional[str]): The directory to persist the Vector Store to.
+ - embedding (Embeddings): The embeddings to use for the Vector Store.
- chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.
- - persist (bool): Whether to persist the Vector Store or not.
- - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.
- - documents (Optional[Document]): The documents to use for the Vector Store.
- - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.
- - chroma_server_host (Optional[str]): The host for the Chroma server.
- - chroma_server_port (Optional[int]): The port for the Chroma server.
- - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.
+ - number_of_results (int, optional): The number of results to retrieve. Defaults to 4.
+ - index_directory (str, optional): The directory to persist the Vector Store to. Defaults to None.
+ - chroma_server_cors_allow_origins (List[str], optional): The CORS allow origins for the Chroma server. Defaults to [].
+ - chroma_server_host (str, optional): The host for the Chroma server. Defaults to None.
+ - chroma_server_http_port (int, optional): The HTTP port for the Chroma server. Defaults to None.
+ - chroma_server_grpc_port (int, optional): The gRPC port for the Chroma server. Defaults to None.
Returns:
- - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.
+ - List[Record]: The list of records.
"""
# Chroma settings
chroma_settings = None
if chroma_server_host is not None:
- chroma_settings = chromadb.config.Settings(
- chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None,
+ chroma_settings = Settings(
+ chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or [],
chroma_server_host=chroma_server_host,
- chroma_server_port=chroma_server_port or None,
+ chroma_server_http_port=chroma_server_http_port or None,
chroma_server_grpc_port=chroma_server_grpc_port or None,
chroma_server_ssl_enabled=chroma_server_ssl_enabled,
)
diff --git a/src/backend/base/langflow/components/vectorsearch/CouchbaseSearch.py b/src/backend/base/langflow/components/vectorsearch/CouchbaseSearch.py
index 0c8a815a4..2aa23c490 100644
--- a/src/backend/base/langflow/components/vectorsearch/CouchbaseSearch.py
+++ b/src/backend/base/langflow/components/vectorsearch/CouchbaseSearch.py
@@ -1,8 +1,8 @@
-from typing import List, Optional
+from typing import List
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Couchbase import CouchbaseComponent
-from langflow.field_typing import Embeddings, NestedDict, Text
+from langflow.field_typing import Embeddings, Text
from langflow.schema import Record
@@ -25,17 +25,13 @@ class CouchbaseSearchComponent(LCVectorStoreComponent):
return {
"input_value": {"display_name": "Input"},
"embedding": {"display_name": "Embedding"},
- "couchbase_connection_string": {"display_name": "Couchbase Cluster connection string","required": True},
- "couchbase_username": {"display_name": "Couchbase username","required": True},
- "couchbase_password": {
- "display_name": "Couchbase password",
- "password": True,
- "required": True
- },
- "bucket_name": {"display_name": "Bucket Name","required": True},
- "scope_name": {"display_name": "Scope Name","required": True},
- "collection_name": {"display_name": "Collection Name","required": True},
- "index_name": {"display_name": "Index Name","required": True},
+ "couchbase_connection_string": {"display_name": "Couchbase Cluster connection string", "required": True},
+ "couchbase_username": {"display_name": "Couchbase username", "required": True},
+ "couchbase_password": {"display_name": "Couchbase password", "password": True, "required": True},
+ "bucket_name": {"display_name": "Bucket Name", "required": True},
+ "scope_name": {"display_name": "Scope Name", "required": True},
+ "collection_name": {"display_name": "Collection Name", "required": True},
+ "index_name": {"display_name": "Index Name", "required": True},
"number_of_results": {
"display_name": "Number of Results",
"info": "Number of results to return.",
diff --git a/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py b/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py
index d4818f354..e995f86f8 100644
--- a/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py
+++ b/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py
@@ -26,7 +26,7 @@ class PineconeSearchComponent(PineconeComponent, LCVectorStoreComponent):
"input_value": {"display_name": "Input"},
"embedding": {"display_name": "Embedding"},
"index_name": {"display_name": "Index Name"},
- "namespace": {"display_name": "Namespace", "advanced": True},
+ "namespace": {"display_name": "Namespace", "info": "Namespace for the index."},
"distance_strategy": {
"display_name": "Distance Strategy",
# get values from enum
diff --git a/src/backend/base/langflow/components/vectorsearch/RedisSearch.py b/src/backend/base/langflow/components/vectorsearch/RedisSearch.py
index 25e71c64b..afe653f6e 100644
--- a/src/backend/base/langflow/components/vectorsearch/RedisSearch.py
+++ b/src/backend/base/langflow/components/vectorsearch/RedisSearch.py
@@ -1,11 +1,10 @@
from typing import List, Optional
-from langchain.embeddings.base import Embeddings
-
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Redis import RedisComponent
from langflow.field_typing import Text
from langflow.schema import Record
+from langchain_core.embeddings import Embeddings
class RedisSearchComponent(RedisComponent, LCVectorStoreComponent):
diff --git a/src/backend/base/langflow/components/vectorsearch/WeaviateSearch.py b/src/backend/base/langflow/components/vectorsearch/WeaviateSearch.py
index fd5ccd1aa..b51f65a55 100644
--- a/src/backend/base/langflow/components/vectorsearch/WeaviateSearch.py
+++ b/src/backend/base/langflow/components/vectorsearch/WeaviateSearch.py
@@ -1,11 +1,10 @@
from typing import List, Optional
-from langchain.embeddings.base import Embeddings
-
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.Weaviate import WeaviateVectorStoreComponent
from langflow.field_typing import Text
from langflow.schema import Record
+from langchain_core.embeddings import Embeddings
class WeaviateSearchVectorStore(WeaviateVectorStoreComponent, LCVectorStoreComponent):
diff --git a/src/backend/base/langflow/components/vectorsearch/__init__.py b/src/backend/base/langflow/components/vectorsearch/__init__.py
index 4cdf5b83c..83ce34b26 100644
--- a/src/backend/base/langflow/components/vectorsearch/__init__.py
+++ b/src/backend/base/langflow/components/vectorsearch/__init__.py
@@ -9,7 +9,7 @@ from .SupabaseVectorStoreSearch import SupabaseSearchComponent
from .VectaraSearch import VectaraSearchComponent
from .WeaviateSearch import WeaviateSearchVectorStore
from .pgvectorSearch import PGVectorSearchComponent
-from .Couchbase import CouchbaseSearchComponent # type: ignore
+from .Couchbase import CouchbaseSearchComponent # type: ignore
__all__ = [
"AstraDBSearchComponent",
diff --git a/src/backend/base/langflow/components/vectorsearch/pgvectorSearch.py b/src/backend/base/langflow/components/vectorsearch/pgvectorSearch.py
index 9b074b5f6..c6bedfede 100644
--- a/src/backend/base/langflow/components/vectorsearch/pgvectorSearch.py
+++ b/src/backend/base/langflow/components/vectorsearch/pgvectorSearch.py
@@ -1,11 +1,10 @@
from typing import List
-from langchain.embeddings.base import Embeddings
-
from langflow.components.vectorstores.base.model import LCVectorStoreComponent
from langflow.components.vectorstores.pgvector import PGVectorComponent
from langflow.field_typing import Text
from langflow.schema import Record
+from langchain_core.embeddings import Embeddings
class PGVectorSearchComponent(PGVectorComponent, LCVectorStoreComponent):
diff --git a/src/backend/base/langflow/components/vectorstores/AstraDB.py b/src/backend/base/langflow/components/vectorstores/AstraDB.py
index 3425c3a4e..07ded028e 100644
--- a/src/backend/base/langflow/components/vectorstores/AstraDB.py
+++ b/src/backend/base/langflow/components/vectorstores/AstraDB.py
@@ -1,12 +1,11 @@
from typing import List, Optional, Union
-
-from langchain.schema import BaseRetriever
from langchain_astradb import AstraDBVectorStore
from langchain_astradb.utils.astradb import SetupMode
from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings, VectorStore
from langflow.schema import Record
+from langchain_core.retrievers import BaseRetriever
class AstraDBVectorStoreComponent(CustomComponent):
diff --git a/src/backend/base/langflow/components/vectorstores/Chroma.py b/src/backend/base/langflow/components/vectorstores/Chroma.py
index 8fe2f54a9..9683d7a98 100644
--- a/src/backend/base/langflow/components/vectorstores/Chroma.py
+++ b/src/backend/base/langflow/components/vectorstores/Chroma.py
@@ -1,12 +1,12 @@
from typing import List, Optional, Union
-import chromadb # type: ignore
-from langchain.embeddings.base import Embeddings
-from langchain.schema import BaseRetriever
-from langchain_community.vectorstores import VectorStore
-from langchain_community.vectorstores.chroma import Chroma
+from chromadb.config import Settings
+from langchain_chroma import Chroma
+from langchain_core.embeddings import Embeddings
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema.schema import Record
@@ -38,7 +38,7 @@ class ChromaComponent(CustomComponent):
"advanced": True,
},
"chroma_server_host": {"display_name": "Server Host", "advanced": True},
- "chroma_server_port": {"display_name": "Server Port", "advanced": True},
+ "chroma_server_http_port": {"display_name": "Server HTTP Port", "advanced": True},
"chroma_server_grpc_port": {
"display_name": "Server gRPC Port",
"advanced": True,
@@ -56,9 +56,9 @@ class ChromaComponent(CustomComponent):
chroma_server_ssl_enabled: bool,
index_directory: Optional[str] = None,
inputs: Optional[List[Record]] = None,
- chroma_server_cors_allow_origins: Optional[str] = None,
+ chroma_server_cors_allow_origins: List[str] = [],
chroma_server_host: Optional[str] = None,
- chroma_server_port: Optional[int] = None,
+ chroma_server_http_port: Optional[int] = None,
chroma_server_grpc_port: Optional[int] = None,
) -> Union[VectorStore, BaseRetriever]:
"""
@@ -66,13 +66,13 @@ class ChromaComponent(CustomComponent):
Args:
- collection_name (str): The name of the collection.
- - index_directory (Optional[str]): The directory to persist the Vector Store to.
+ - embedding (Embeddings): The embeddings to use for the Vector Store.
- chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.
- - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.
- - documents (Optional[Document]): The documents to use for the Vector Store.
- - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.
+ - index_directory (Optional[str]): The directory to persist the Vector Store to.
+ - inputs (Optional[List[Record]]): The input records to use for the Vector Store.
+ - chroma_server_cors_allow_origins (List[str]): The CORS allow origins for the Chroma server.
- chroma_server_host (Optional[str]): The host for the Chroma server.
- - chroma_server_port (Optional[int]): The port for the Chroma server.
+ - chroma_server_http_port (Optional[int]): The HTTP port for the Chroma server.
- chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.
Returns:
@@ -83,10 +83,10 @@ class ChromaComponent(CustomComponent):
chroma_settings = None
if chroma_server_host is not None:
- chroma_settings = chromadb.config.Settings(
- chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None,
+ chroma_settings = Settings(
+ chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or [],
chroma_server_host=chroma_server_host,
- chroma_server_port=chroma_server_port or None,
+ chroma_server_http_port=chroma_server_http_port or None,
chroma_server_grpc_port=chroma_server_grpc_port or None,
chroma_server_ssl_enabled=chroma_server_ssl_enabled,
)
diff --git a/src/backend/base/langflow/components/vectorstores/Couchbase.py b/src/backend/base/langflow/components/vectorstores/Couchbase.py
index 1816e85fb..f99ac7d40 100644
--- a/src/backend/base/langflow/components/vectorstores/Couchbase.py
+++ b/src/backend/base/langflow/components/vectorstores/Couchbase.py
@@ -1,8 +1,6 @@
from typing import List, Optional, Union
-from langchain.schema import BaseRetriever
-
-from langchain_community.vectorstores import CouchbaseVectorStore
+from langchain_community.vectorstores import CouchbaseVectorStore
from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings, VectorStore
@@ -10,9 +8,10 @@ from langflow.schema import Record
from datetime import timedelta
-from couchbase.auth import PasswordAuthenticator # type: ignore
-from couchbase.cluster import Cluster # type: ignore
-from couchbase.options import ClusterOptions # type: ignore
+from couchbase.auth import PasswordAuthenticator # type: ignore
+from couchbase.cluster import Cluster # type: ignore
+from couchbase.options import ClusterOptions # type: ignore
+from langchain_core.retrievers import BaseRetriever
class CouchbaseComponent(CustomComponent):
@@ -34,17 +33,13 @@ class CouchbaseComponent(CustomComponent):
return {
"inputs": {"display_name": "Input", "input_types": ["Document", "Record"]},
"embedding": {"display_name": "Embedding"},
- "couchbase_connection_string": {"display_name": "Couchbase Cluster connection string","required": True},
- "couchbase_username": {"display_name": "Couchbase username","required": True},
- "couchbase_password": {
- "display_name": "Couchbase password",
- "password": True,
- "required": True
- },
- "bucket_name": {"display_name": "Bucket Name","required": True},
- "scope_name": {"display_name": "Scope Name","required": True},
- "collection_name": {"display_name": "Collection Name","required": True},
- "index_name": {"display_name": "Index Name","required": True},
+ "couchbase_connection_string": {"display_name": "Couchbase Cluster connection string", "required": True},
+ "couchbase_username": {"display_name": "Couchbase username", "required": True},
+ "couchbase_password": {"display_name": "Couchbase password", "password": True, "required": True},
+ "bucket_name": {"display_name": "Bucket Name", "required": True},
+ "scope_name": {"display_name": "Scope Name", "required": True},
+ "collection_name": {"display_name": "Collection Name", "required": True},
+ "index_name": {"display_name": "Index Name", "required": True},
}
def build(
diff --git a/src/backend/base/langflow/components/vectorstores/FAISS.py b/src/backend/base/langflow/components/vectorstores/FAISS.py
index ea9ee1c4d..9d9624919 100644
--- a/src/backend/base/langflow/components/vectorstores/FAISS.py
+++ b/src/backend/base/langflow/components/vectorstores/FAISS.py
@@ -1,11 +1,11 @@
from typing import List, Text, Union
-from langchain.schema import BaseRetriever
-from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.faiss import FAISS
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
+from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py
index 6c800957a..8c045a1bd 100644
--- a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py
+++ b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py
@@ -1,8 +1,9 @@
from typing import List, Optional
from langchain_community.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch
+
+from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/Pinecone.py b/src/backend/base/langflow/components/vectorstores/Pinecone.py
index b25bb6086..2bc0e2252 100644
--- a/src/backend/base/langflow/components/vectorstores/Pinecone.py
+++ b/src/backend/base/langflow/components/vectorstores/Pinecone.py
@@ -1,13 +1,13 @@
from typing import List, Optional, Union
-from langchain.schema import BaseRetriever
-from langchain_community.vectorstores import VectorStore
from langchain_core.documents import Document
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
from langchain_pinecone._utilities import DistanceStrategy
from langchain_pinecone.vectorstores import PineconeVectorStore
+from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/Qdrant.py b/src/backend/base/langflow/components/vectorstores/Qdrant.py
index e6b3ddbc9..dabaa17fc 100644
--- a/src/backend/base/langflow/components/vectorstores/Qdrant.py
+++ b/src/backend/base/langflow/components/vectorstores/Qdrant.py
@@ -1,11 +1,11 @@
from typing import Optional, Union
-from langchain.schema import BaseRetriever
-from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.qdrant import Qdrant
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
+from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/Redis.py b/src/backend/base/langflow/components/vectorstores/Redis.py
index ea1046037..04d137538 100644
--- a/src/backend/base/langflow/components/vectorstores/Redis.py
+++ b/src/backend/base/langflow/components/vectorstores/Redis.py
@@ -1,11 +1,11 @@
from typing import Optional, Union
-from langchain.embeddings.base import Embeddings
-from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.redis import Redis
+from langchain_core.embeddings import Embeddings
from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py
index df80b3699..5e87a09ca 100644
--- a/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py
+++ b/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py
@@ -1,12 +1,12 @@
from typing import List, Optional, Union
-from langchain.schema import BaseRetriever
-from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.supabase import SupabaseVectorStore
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
from supabase.client import Client, create_client
+from langflow.custom import CustomComponent
from langflow.field_typing import Embeddings
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/Vectara.py b/src/backend/base/langflow/components/vectorstores/Vectara.py
index 5c087875f..247614345 100644
--- a/src/backend/base/langflow/components/vectorstores/Vectara.py
+++ b/src/backend/base/langflow/components/vectorstores/Vectara.py
@@ -7,8 +7,8 @@ from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores.vectara import Vectara
from langchain_core.vectorstores import VectorStore
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseRetriever
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/Weaviate.py b/src/backend/base/langflow/components/vectorstores/Weaviate.py
index 99ede77f7..e1a802000 100644
--- a/src/backend/base/langflow/components/vectorstores/Weaviate.py
+++ b/src/backend/base/langflow/components/vectorstores/Weaviate.py
@@ -1,12 +1,13 @@
from typing import Optional, Union
import weaviate # type: ignore
-from langchain.embeddings.base import Embeddings
-from langchain.schema import BaseRetriever
-from langchain_community.vectorstores import VectorStore, Weaviate
+from langchain_community.vectorstores import Weaviate
from langchain_core.documents import Document
+from langchain_core.embeddings import Embeddings
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/base/model.py b/src/backend/base/langflow/components/vectorstores/base/model.py
index 668c5eff2..18a37c9cf 100644
--- a/src/backend/base/langflow/components/vectorstores/base/model.py
+++ b/src/backend/base/langflow/components/vectorstores/base/model.py
@@ -4,9 +4,9 @@ from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever
from langchain_core.vectorstores import VectorStore
+from langflow.custom import CustomComponent
from langflow.field_typing import Text
from langflow.helpers.record import docs_to_records
-from langflow.interface.custom.custom_component import CustomComponent
from langflow.schema import Record
diff --git a/src/backend/base/langflow/components/vectorstores/pgvector.py b/src/backend/base/langflow/components/vectorstores/pgvector.py
index b061b22ac..75c833ded 100644
--- a/src/backend/base/langflow/components/vectorstores/pgvector.py
+++ b/src/backend/base/langflow/components/vectorstores/pgvector.py
@@ -1,11 +1,11 @@
from typing import Optional, Union
-from langchain.embeddings.base import Embeddings
-from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.pgvector import PGVector
+from langchain_core.embeddings import Embeddings
from langchain_core.retrievers import BaseRetriever
+from langchain_core.vectorstores import VectorStore
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.schema.schema import Record
diff --git a/src/backend/base/langflow/custom.py b/src/backend/base/langflow/custom.py
deleted file mode 100644
index ebe4c04e8..000000000
--- a/src/backend/base/langflow/custom.py
+++ /dev/null
@@ -1 +0,0 @@
-from langflow.interface.custom.custom_component import CustomComponent # noqa: F401
diff --git a/src/backend/base/langflow/custom/__init__.py b/src/backend/base/langflow/custom/__init__.py
new file mode 100644
index 000000000..bd789498a
--- /dev/null
+++ b/src/backend/base/langflow/custom/__init__.py
@@ -0,0 +1,3 @@
+from langflow.custom.custom_component import CustomComponent
+
+__all__ = ["CustomComponent"]
diff --git a/src/backend/base/langflow/interface/custom/attributes.py b/src/backend/base/langflow/custom/attributes.py
similarity index 100%
rename from src/backend/base/langflow/interface/custom/attributes.py
rename to src/backend/base/langflow/custom/attributes.py
diff --git a/src/backend/base/langflow/interface/custom/code_parser/__init__.py b/src/backend/base/langflow/custom/code_parser/__init__.py
similarity index 100%
rename from src/backend/base/langflow/interface/custom/code_parser/__init__.py
rename to src/backend/base/langflow/custom/code_parser/__init__.py
diff --git a/src/backend/base/langflow/interface/custom/code_parser/code_parser.py b/src/backend/base/langflow/custom/code_parser/code_parser.py
similarity index 98%
rename from src/backend/base/langflow/interface/custom/code_parser/code_parser.py
rename to src/backend/base/langflow/custom/code_parser/code_parser.py
index 44dbbc1d9..17fe12896 100644
--- a/src/backend/base/langflow/interface/custom/code_parser/code_parser.py
+++ b/src/backend/base/langflow/custom/code_parser/code_parser.py
@@ -8,8 +8,8 @@ from cachetools import TTLCache, cachedmethod, keys
from fastapi import HTTPException
from loguru import logger
-from langflow.interface.custom.eval import eval_custom_component_code
-from langflow.interface.custom.schema import CallableCodeDetails, ClassCodeDetails, MissingDefault
+from langflow.custom.eval import eval_custom_component_code
+from langflow.custom.schema import CallableCodeDetails, ClassCodeDetails, MissingDefault
class CodeSyntaxError(HTTPException):
diff --git a/src/backend/base/langflow/interface/custom/code_parser/utils.py b/src/backend/base/langflow/custom/code_parser/utils.py
similarity index 100%
rename from src/backend/base/langflow/interface/custom/code_parser/utils.py
rename to src/backend/base/langflow/custom/code_parser/utils.py
diff --git a/src/backend/base/langflow/interface/custom/custom_component/__init__.py b/src/backend/base/langflow/custom/custom_component/__init__.py
similarity index 100%
rename from src/backend/base/langflow/interface/custom/custom_component/__init__.py
rename to src/backend/base/langflow/custom/custom_component/__init__.py
diff --git a/src/backend/base/langflow/interface/custom/custom_component/component.py b/src/backend/base/langflow/custom/custom_component/component.py
similarity index 93%
rename from src/backend/base/langflow/interface/custom/custom_component/component.py
rename to src/backend/base/langflow/custom/custom_component/component.py
index 470ebcde8..d45b5daed 100644
--- a/src/backend/base/langflow/interface/custom/custom_component/component.py
+++ b/src/backend/base/langflow/custom/custom_component/component.py
@@ -5,9 +5,9 @@ from typing import Any, ClassVar, Optional
from cachetools import TTLCache, cachedmethod
from fastapi import HTTPException
-from langflow.interface.custom.attributes import ATTR_FUNC_MAPPING
-from langflow.interface.custom.code_parser import CodeParser
-from langflow.interface.custom.eval import eval_custom_component_code
+from langflow.custom.attributes import ATTR_FUNC_MAPPING
+from langflow.custom.code_parser import CodeParser
+from langflow.custom.eval import eval_custom_component_code
from langflow.utils import validate
diff --git a/src/backend/base/langflow/interface/custom/custom_component/custom_component.py b/src/backend/base/langflow/custom/custom_component/custom_component.py
similarity index 99%
rename from src/backend/base/langflow/interface/custom/custom_component/custom_component.py
rename to src/backend/base/langflow/custom/custom_component/custom_component.py
index 1638ebb2c..aeac9cae6 100644
--- a/src/backend/base/langflow/interface/custom/custom_component/custom_component.py
+++ b/src/backend/base/langflow/custom/custom_component/custom_component.py
@@ -7,13 +7,12 @@ import yaml
from cachetools import TTLCache, cachedmethod
from langchain_core.documents import Document
from pydantic import BaseModel
-
-from langflow.helpers.flow import list_flows, load_flow, run_flow
-from langflow.interface.custom.code_parser.utils import (
+from langflow.custom.code_parser.utils import (
extract_inner_type_from_generic_alias,
extract_union_types_from_generic_alias,
)
-from langflow.interface.custom.custom_component.component import Component
+from langflow.custom.custom_component.component import Component
+from langflow.helpers.flow import list_flows, load_flow, run_flow
from langflow.schema import Record
from langflow.schema.dotdict import dotdict
from langflow.services.deps import get_storage_service, get_variable_service, session_scope
diff --git a/src/backend/base/langflow/interface/custom/directory_reader/__init__.py b/src/backend/base/langflow/custom/directory_reader/__init__.py
similarity index 100%
rename from src/backend/base/langflow/interface/custom/directory_reader/__init__.py
rename to src/backend/base/langflow/custom/directory_reader/__init__.py
diff --git a/src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py b/src/backend/base/langflow/custom/directory_reader/directory_reader.py
similarity index 98%
rename from src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py
rename to src/backend/base/langflow/custom/directory_reader/directory_reader.py
index e9f3f6ceb..b9f55f21f 100644
--- a/src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py
+++ b/src/backend/base/langflow/custom/directory_reader/directory_reader.py
@@ -5,7 +5,7 @@ from pathlib import Path
from loguru import logger
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
class CustomComponentPathValueError(ValueError):
@@ -67,7 +67,7 @@ class DirectoryReader:
return len(file_content.strip()) == 0
def filter_loaded_components(self, data: dict, with_errors: bool) -> dict:
- from langflow.interface.custom.utils import build_component
+ from langflow.custom.utils import build_component
items = []
for menu in data["menu"]:
diff --git a/src/backend/base/langflow/interface/custom/directory_reader/utils.py b/src/backend/base/langflow/custom/directory_reader/utils.py
similarity index 98%
rename from src/backend/base/langflow/interface/custom/directory_reader/utils.py
rename to src/backend/base/langflow/custom/directory_reader/utils.py
index 2772cb78c..ddd24d8f3 100644
--- a/src/backend/base/langflow/interface/custom/directory_reader/utils.py
+++ b/src/backend/base/langflow/custom/directory_reader/utils.py
@@ -1,6 +1,6 @@
from loguru import logger
-from langflow.interface.custom.directory_reader import DirectoryReader
+from langflow.custom.directory_reader import DirectoryReader
from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode
diff --git a/src/backend/base/langflow/interface/custom/eval.py b/src/backend/base/langflow/custom/eval.py
similarity index 80%
rename from src/backend/base/langflow/interface/custom/eval.py
rename to src/backend/base/langflow/custom/eval.py
index b36f10d92..baa202402 100644
--- a/src/backend/base/langflow/interface/custom/eval.py
+++ b/src/backend/base/langflow/custom/eval.py
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Type
from langflow.utils import validate
if TYPE_CHECKING:
- from langflow.interface.custom.custom_component import CustomComponent
+ from langflow.custom import CustomComponent
def eval_custom_component_code(code: str) -> Type["CustomComponent"]:
diff --git a/src/backend/base/langflow/interface/custom/schema.py b/src/backend/base/langflow/custom/schema.py
similarity index 100%
rename from src/backend/base/langflow/interface/custom/schema.py
rename to src/backend/base/langflow/custom/schema.py
diff --git a/src/backend/base/langflow/interface/custom/utils.py b/src/backend/base/langflow/custom/utils.py
similarity index 97%
rename from src/backend/base/langflow/interface/custom/utils.py
rename to src/backend/base/langflow/custom/utils.py
index 2935a363a..5f7af956e 100644
--- a/src/backend/base/langflow/interface/custom/utils.py
+++ b/src/backend/base/langflow/custom/utils.py
@@ -10,17 +10,17 @@ from fastapi import HTTPException
from loguru import logger
from pydantic import BaseModel
-from langflow.field_typing.range_spec import RangeSpec
-from langflow.interface.custom.attributes import ATTR_FUNC_MAPPING
-from langflow.interface.custom.code_parser.utils import extract_inner_type
-from langflow.interface.custom.custom_component import CustomComponent
-from langflow.interface.custom.directory_reader.utils import (
+from langflow.custom import CustomComponent
+from langflow.custom.attributes import ATTR_FUNC_MAPPING
+from langflow.custom.code_parser.utils import extract_inner_type
+from langflow.custom.directory_reader.utils import (
build_custom_component_list_from_path,
determine_component_name,
merge_nested_dicts_with_renaming,
)
-from langflow.interface.custom.eval import eval_custom_component_code
-from langflow.interface.custom.schema import MissingDefault
+from langflow.custom.eval import eval_custom_component_code
+from langflow.custom.schema import MissingDefault
+from langflow.field_typing.range_spec import RangeSpec
from langflow.schema import dotdict
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode
diff --git a/src/backend/base/langflow/field_typing/constants.py b/src/backend/base/langflow/field_typing/constants.py
index 2e8fd4b3b..d73257c14 100644
--- a/src/backend/base/langflow/field_typing/constants.py
+++ b/src/backend/base/langflow/field_typing/constants.py
@@ -2,17 +2,18 @@ from typing import Callable, Dict, Text, Union
from langchain.agents.agent import AgentExecutor
from langchain.chains.base import Chain
-from langchain.document_loaders.base import BaseLoader
-from langchain.llms.base import BaseLLM
from langchain.memory.chat_memory import BaseChatMemory
-from langchain.prompts import BasePromptTemplate, ChatPromptTemplate, PromptTemplate
-from langchain.schema import BaseOutputParser, BaseRetriever, Document
-from langchain.schema.embeddings import Embeddings
-from langchain.schema.language_model import BaseLanguageModel
-from langchain.schema.memory import BaseMemory
-from langchain.text_splitter import TextSplitter
-from langchain.tools import Tool
-from langchain_community.vectorstores import VectorStore
+from langchain_core.document_loaders import BaseLoader
+from langchain_core.documents import Document
+from langchain_core.embeddings import Embeddings
+from langchain_core.language_models import BaseLLM, BaseLanguageModel
+from langchain_core.memory import BaseMemory
+from langchain_core.output_parsers import BaseOutputParser
+from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate, PromptTemplate
+from langchain_core.retrievers import BaseRetriever
+from langchain_core.tools import Tool
+from langchain_core.vectorstores import VectorStore
+from langchain_text_splitters import TextSplitter
# Type alias for more complex dicts
NestedDict = Dict[str, Union[str, Dict]]
diff --git a/src/backend/base/langflow/graph/__init__.py b/src/backend/base/langflow/graph/__init__.py
index e80dcaa5f..bb93f92cf 100644
--- a/src/backend/base/langflow/graph/__init__.py
+++ b/src/backend/base/langflow/graph/__init__.py
@@ -1,39 +1,6 @@
from langflow.graph.edge.base import Edge
from langflow.graph.graph.base import Graph
from langflow.graph.vertex.base import Vertex
-from langflow.graph.vertex.types import (
- AgentVertex,
- ChainVertex,
- CustomComponentVertex,
- DocumentLoaderVertex,
- EmbeddingVertex,
- LLMVertex,
- MemoryVertex,
- PromptVertex,
- RetrieverVertex,
- TextSplitterVertex,
- ToolkitVertex,
- ToolVertex,
- VectorStoreVertex,
- WrapperVertex,
-)
+from langflow.graph.vertex.types import CustomComponentVertex, InterfaceVertex, StateVertex
-__all__ = [
- "Graph",
- "Vertex",
- "Edge",
- "AgentVertex",
- "ChainVertex",
- "DocumentLoaderVertex",
- "EmbeddingVertex",
- "LLMVertex",
- "MemoryVertex",
- "PromptVertex",
- "TextSplitterVertex",
- "ToolVertex",
- "ToolkitVertex",
- "VectorStoreVertex",
- "WrapperVertex",
- "RetrieverVertex",
- "CustomComponentVertex",
-]
+__all__ = ["Edge", "Graph", "Vertex", "CustomComponentVertex", "InterfaceVertex", "StateVertex"]
diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py
index 001dd7b51..37215bdeb 100644
--- a/src/backend/base/langflow/graph/graph/base.py
+++ b/src/backend/base/langflow/graph/graph/base.py
@@ -14,8 +14,7 @@ from langflow.graph.graph.state_manager import GraphStateManager
from langflow.graph.graph.utils import process_flow
from langflow.graph.schema import InterfaceComponentTypes, RunOutputs
from langflow.graph.vertex.base import Vertex
-from langflow.graph.vertex.types import FileToolVertex, InterfaceVertex, LLMVertex, StateVertex, ToolkitVertex
-from langflow.interface.tools.constants import FILE_TOOLS
+from langflow.graph.vertex.types import InterfaceVertex, StateVertex
from langflow.schema import Record
from langflow.schema.schema import INPUT_FIELD_NAME, InputType
from langflow.services.deps import get_chat_service
@@ -687,16 +686,8 @@ class Graph:
def _build_vertex_params(self) -> None:
"""Identifies and handles the LLM vertex within the graph."""
- llm_vertex = None
for vertex in self.vertices:
vertex._build_params()
- if isinstance(vertex, LLMVertex):
- llm_vertex = vertex
-
- if llm_vertex:
- for vertex in self.vertices:
- if isinstance(vertex, ToolkitVertex):
- vertex.params["llm"] = llm_vertex
def _validate_vertex(self, vertex: Vertex) -> bool:
"""Validates a vertex."""
@@ -1004,8 +995,6 @@ class Graph:
elif node_name in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_name]
- if node_type in FILE_TOOLS:
- return FileToolVertex
if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type]
return (
diff --git a/src/backend/base/langflow/graph/graph/constants.py b/src/backend/base/langflow/graph/graph/constants.py
index 2ed42fbff..8f5840524 100644
--- a/src/backend/base/langflow/graph/graph/constants.py
+++ b/src/backend/base/langflow/graph/graph/constants.py
@@ -1,17 +1,5 @@
from langflow.graph.schema import CHAT_COMPONENTS
from langflow.graph.vertex import types
-from langflow.interface.agents.base import agent_creator
-from langflow.interface.custom.base import custom_component_creator
-from langflow.interface.document_loaders.base import documentloader_creator
-from langflow.interface.embeddings.base import embedding_creator
-from langflow.interface.llms.base import llm_creator
-from langflow.interface.memories.base import memory_creator
-from langflow.interface.prompts.base import prompt_creator
-from langflow.interface.retrievers.base import retriever_creator
-from langflow.interface.text_splitters.base import textsplitter_creator
-from langflow.interface.toolkits.base import toolkits_creator
-from langflow.interface.tools.base import tool_creator
-from langflow.interface.wrappers.base import wrapper_creator
from langflow.utils.lazy_load import LazyLoadDictBase
@@ -32,20 +20,7 @@ class VertexTypesDict(LazyLoadDictBase):
def get_type_dict(self):
return {
- **{t: types.PromptVertex for t in prompt_creator.to_list()},
- **{t: types.AgentVertex for t in agent_creator.to_list()},
- # **{t: types.ChainVertex for t in chain_creator.to_list()},
- **{t: types.ToolVertex for t in tool_creator.to_list()},
- **{t: types.ToolkitVertex for t in toolkits_creator.to_list()},
- **{t: types.WrapperVertex for t in wrapper_creator.to_list()},
- **{t: types.LLMVertex for t in llm_creator.to_list()},
- **{t: types.MemoryVertex for t in memory_creator.to_list()},
- **{t: types.EmbeddingVertex for t in embedding_creator.to_list()},
- # **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()},
- **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()},
- **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()},
- **{t: types.CustomComponentVertex for t in custom_component_creator.to_list()},
- **{t: types.RetrieverVertex for t in retriever_creator.to_list()},
+ **{t: types.CustomComponentVertex for t in ["CustomComponent"]},
**{t: types.InterfaceVertex for t in CHAT_COMPONENTS},
}
diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py
index 3cbba34a1..48cfa82ce 100644
--- a/src/backend/base/langflow/graph/vertex/base.py
+++ b/src/backend/base/langflow/graph/vertex/base.py
@@ -10,7 +10,7 @@ from loguru import logger
from langflow.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData
from langflow.graph.utils import UnbuiltObject, UnbuiltResult
-from langflow.graph.vertex.utils import generate_result, log_transaction
+from langflow.graph.vertex.utils import log_transaction
from langflow.interface.initialize import loading
from langflow.interface.listing import lazy_load_dict
from langflow.schema.schema import INPUT_FIELD_NAME
@@ -455,29 +455,6 @@ class Vertex:
)
self.set_result(result_dict)
- async def _run(
- self,
- user_id: str,
- inputs: Optional[dict] = None,
- session_id: Optional[str] = None,
- ):
- # user_id is just for compatibility with the other build methods
- inputs = inputs or {}
- # inputs = {key: value or "" for key, value in inputs.items()}
- # if hasattr(self._built_object, "input_keys"):
- # # test if all keys are in inputs
- # # and if not add them with empty string
- # # for key in self._built_object.input_keys:
- # # if key not in inputs:
- # # inputs[key] = ""
- # if inputs == {} and hasattr(self._built_object, "prompt"):
- # inputs = self._built_object.prompt.partial_variables
- if isinstance(self._built_object, str):
- self._built_result = self._built_object
-
- result = await generate_result(self._built_object, inputs, self.has_external_output, session_id)
- self._built_result = result
-
async def _build_each_vertex_in_params_dict(self, user_id=None):
"""
Iterates over each vertex in the params dictionary and builds it.
diff --git a/src/backend/base/langflow/graph/vertex/types.py b/src/backend/base/langflow/graph/vertex/types.py
index 7d7608b9e..9418dde35 100644
--- a/src/backend/base/langflow/graph/vertex/types.py
+++ b/src/backend/base/langflow/graph/vertex/types.py
@@ -1,15 +1,13 @@
-import ast
import json
-from typing import AsyncIterator, Callable, Dict, Iterator, List, Optional, Union
+from typing import AsyncIterator, Dict, Iterator, List
import yaml
from langchain_core.messages import AIMessage
from loguru import logger
from langflow.graph.schema import CHAT_COMPONENTS, RECORDS_COMPONENTS, InterfaceComponentTypes
-from langflow.graph.utils import UnbuiltObject, flatten_list, serialize_field
+from langflow.graph.utils import UnbuiltObject, serialize_field
from langflow.graph.vertex.base import Vertex
-from langflow.interface.utils import extract_input_variables_from_prompt
from langflow.schema import Record
from langflow.schema.schema import INPUT_FIELD_NAME
from langflow.services.monitor.utils import log_vertex_build
@@ -17,289 +15,6 @@ from langflow.utils.schemas import ChatOutputResponse, RecordOutputResponse
from langflow.utils.util import unescape_string
-class AgentVertex(Vertex):
- def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
- super().__init__(data, graph=graph, base_type="agents", params=params)
-
- self.tools: List[Union[ToolkitVertex, ToolVertex]] = []
- self.chains: List[ChainVertex] = []
- self.steps: List[Callable] = [self._custom_build]
-
- def __getstate__(self):
- state = super().__getstate__()
- state["tools"] = self.tools
- state["chains"] = self.chains
- return state
-
- def __setstate__(self, state):
- self.tools = state["tools"]
- self.chains = state["chains"]
- super().__setstate__(state)
-
- def _set_tools_and_chains(self) -> None:
- for edge in self.edges:
- if not hasattr(edge, "source"):
- continue
- source_node = edge.source
- if isinstance(source_node, (ToolVertex, ToolkitVertex)):
- self.tools.append(source_node)
- elif isinstance(source_node, ChainVertex):
- self.chains.append(source_node)
-
- async def _custom_build(self, *args, **kwargs):
- user_id = kwargs.get("user_id", None)
- self._set_tools_and_chains()
- # First, build the tools
- for tool_node in self.tools:
- await tool_node.build(user_id=user_id)
-
- # Next, build the chains and the rest
- for chain_node in self.chains:
- await chain_node.build(tools=self.tools, user_id=user_id)
-
- await self._build(user_id=user_id)
-
-
-class ToolVertex(Vertex):
- def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
- super().__init__(data, graph=graph, base_type="tools", params=params)
-
-
-class LLMVertex(Vertex):
- built_node_type = None
- class_built_object = None
-
- def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
- super().__init__(data, graph=graph, base_type="models", params=params)
- self.steps: List[Callable] = [self._custom_build]
-
- async def _custom_build(self, *args, **kwargs):
- # LLM is different because some models might take up too much memory
- # or time to load. So we only load them when we need them.
- # Avoid deepcopying the LLM
- # that are loaded from a file
- force = kwargs.get("force", False)
- user_id = kwargs.get("user_id", None)
- if self.vertex_type == self.built_node_type:
- self._built_object = self.class_built_object
- if not self._built or force:
- await self._build(user_id=user_id)
- self.built_node_type = self.vertex_type
- self.class_built_object = self._built_object
-
-
-class ToolkitVertex(Vertex):
- def __init__(self, data: Dict, graph, params=None):
- super().__init__(data, graph=graph, base_type="toolkits", params=params)
-
-
-class FileToolVertex(ToolVertex):
- def __init__(self, data: Dict, graph, params=None):
- super().__init__(
- data,
- params=params,
- graph=graph,
- )
-
-
-class WrapperVertex(Vertex):
- def __init__(self, data: Dict, graph, params=None):
- super().__init__(data, graph=graph, base_type="wrappers")
- self.steps: List[Callable] = [self._custom_build]
-
- async def _custom_build(self, *args, **kwargs):
- force = kwargs.get("force", False)
- user_id = kwargs.get("user_id", None)
- if not self._built or force:
- if "headers" in self.params:
- self.params["headers"] = ast.literal_eval(self.params["headers"])
- await self._build(user_id=user_id)
-
-
-class DocumentLoaderVertex(Vertex):
- def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
- super().__init__(data, graph=graph, base_type="documentloaders", params=params)
-
- def _built_object_repr(self):
- # This built_object is a list of documents. Maybe we should
- # show how many documents are in the list?
-
- if not isinstance(self._built_object, UnbuiltObject):
- avg_length = sum(len(record.get_text()) for record in self._built_object if hasattr(record, "text")) / len(
- self._built_object
- )
- return f"""{self.display_name}({len(self._built_object)} records)
- \nAvg. Record Length (characters): {int(avg_length)}
- Records: {self._built_object[:3]}..."""
- return f"{self.vertex_type}()"
-
-
-class EmbeddingVertex(Vertex):
- def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
- super().__init__(data, graph=graph, base_type="embeddings", params=params)
-
-
-class VectorStoreVertex(Vertex):
- def __init__(self, data: Dict, graph, params=None):
- super().__init__(data, graph=graph, base_type="vectorstores")
-
- self.params = params or {}
-
- # VectorStores may contain databse connections
- # so we need to define the __reduce__ method and the __setstate__ method
- # to avoid pickling errors
- def clean_edges_for_pickling(self):
- # for each edge that has self as source
- # we need to clear the _built_object of the target
- # so that we don't try to pickle a database connection
- for edge in self.edges:
- if edge.source == self:
- edge.target._built_object = None
- edge.target._built = False
- edge.target.params[edge.target_param] = self
-
- def remove_docs_and_texts_from_params(self):
- # remove documents and texts from params
- # so that we don't try to pickle a database connection
- self.params.pop("documents", None)
- self.params.pop("texts", None)
-
- def __getstate__(self):
- # We want to save the params attribute
- # and if "documents" or "texts" are in the params
- # we want to remove them because they have already
- # been processed.
- params = self.params.copy()
- params.pop("documents", None)
- params.pop("texts", None)
- self.clean_edges_for_pickling()
-
- return super().__getstate__()
-
- def __setstate__(self, state):
- super().__setstate__(state)
- self.remove_docs_and_texts_from_params()
-
-
-class MemoryVertex(Vertex):
- def __init__(self, data: Dict, graph):
- super().__init__(data, graph=graph, base_type="memory")
-
-
-class RetrieverVertex(Vertex):
- def __init__(self, data: Dict, graph):
- super().__init__(data, graph=graph, base_type="retrievers")
-
-
-class TextSplitterVertex(Vertex):
- def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
- super().__init__(data, graph=graph, base_type="textsplitters", params=params)
-
- def _built_object_repr(self):
- # This built_object is a list of documents. Maybe we should
- # show how many documents are in the list?
-
- if not isinstance(self._built_object, UnbuiltObject):
- avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(self._built_object)
- return f"""{self.vertex_type}({len(self._built_object)} documents)
- \nAvg. Document Length (characters): {int(avg_length)}
- \nDocuments: {self._built_object[:3]}..."""
- return f"{self.vertex_type}()"
-
-
-class ChainVertex(Vertex):
- def __init__(self, data: Dict, graph):
- super().__init__(data, graph=graph, base_type="chains")
- self.steps = [self._custom_build]
-
- async def _custom_build(self, *args, **kwargs):
- force = kwargs.get("force", False)
- user_id = kwargs.get("user_id", None)
- # Remove this once LLMChain is CustomComponent
- self.params.pop("code", None)
- for key, value in self.params.items():
- if isinstance(value, PromptVertex):
- # Build the PromptVertex, passing the tools if available
- tools = kwargs.get("tools", None)
- self.params[key] = value.build(tools=tools, frozen=force)
-
- await self._build(user_id=user_id)
-
- def set_artifacts(self) -> None:
- if isinstance(self._built_object, UnbuiltObject):
- return
- if self._built_object and hasattr(self._built_object, "input_keys"):
- self.artifacts = dict(input_keys=self._built_object.input_keys)
-
- def _built_object_repr(self):
- if isinstance(self._built_object, str):
- return self._built_object
- return super()._built_object_repr()
-
-
-class PromptVertex(Vertex):
- def __init__(self, data: Dict, graph):
- super().__init__(data, graph=graph, base_type="prompts")
- self.steps: List[Callable] = [self._custom_build]
-
- async def _custom_build(self, *args, **kwargs):
- force = kwargs.get("force", False)
- user_id = kwargs.get("user_id", None)
- tools = kwargs.get("tools", [])
- if not self._built or force:
- if "input_variables" not in self.params or self.params["input_variables"] is None:
- self.params["input_variables"] = []
- # Check if it is a ZeroShotPrompt and needs a tool
- if "ShotPrompt" in self.vertex_type:
- tools = [tool_node.build(user_id=user_id) for tool_node in tools] if tools is not None else []
- # flatten the list of tools if it is a list of lists
- # first check if it is a list
- if tools and isinstance(tools, list) and isinstance(tools[0], list):
- tools = flatten_list(tools)
- self.params["tools"] = tools
- prompt_params = [
- key for key, value in self.params.items() if isinstance(value, str) and key != "format_instructions"
- ]
- else:
- prompt_params = ["template"]
-
- if "prompt" not in self.params and "messages" not in self.params:
- for param in prompt_params:
- prompt_text = self.params[param]
- variables = extract_input_variables_from_prompt(prompt_text)
- self.params["input_variables"].extend(variables)
- self.params["input_variables"] = list(set(self.params["input_variables"]))
- elif isinstance(self.params, dict):
- self.params.pop("input_variables", None)
-
- await self._build(user_id=user_id)
-
- def _built_object_repr(self):
- if not self.artifacts or self._built_object is None or not hasattr(self._built_object, "format"):
- return super()._built_object_repr()
- elif isinstance(self._built_object, UnbuiltObject):
- return super()._built_object_repr()
- # We'll build the prompt with the artifacts
- # to show the user what the prompt looks like
- # with the variables filled in
- artifacts = self.artifacts.copy()
- # Remove the handle_keys from the artifacts
- # so the prompt format doesn't break
- artifacts.pop("handle_keys", None)
- try:
- if not hasattr(self._built_object, "template") and hasattr(self._built_object, "prompt"):
- template = self._built_object.prompt.template
- else:
- template = self._built_object.template
- for key, value in artifacts.items():
- if value:
- replace_key = "{" + key + "}"
- template = template.replace(replace_key, value)
- return template if isinstance(template, str) else f"{self.vertex_type}({template})"
- except KeyError:
- return str(self._built_object)
-
-
class CustomComponentVertex(Vertex):
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="custom_components")
diff --git a/src/backend/base/langflow/graph/vertex/utils.py b/src/backend/base/langflow/graph/vertex/utils.py
index b978424f2..59a1c1949 100644
--- a/src/backend/base/langflow/graph/vertex/utils.py
+++ b/src/backend/base/langflow/graph/vertex/utils.py
@@ -1,74 +1,13 @@
-from typing import Any, Optional, Union, TYPE_CHECKING
+from typing import TYPE_CHECKING
-from langchain_core.messages import BaseMessage
-from langchain_core.runnables import Runnable
from loguru import logger
from langflow.services.deps import get_monitor_service
-from langflow.utils.constants import PYTHON_BASIC_TYPES
if TYPE_CHECKING:
from langflow.graph.vertex.base import Vertex
-def is_basic_type(obj):
- return type(obj) in PYTHON_BASIC_TYPES
-
-
-async def invoke_lc_runnable(
- built_object: Runnable, inputs: dict, has_external_output: bool, session_id: Optional[str] = None, **kwargs
-) -> Union[str, BaseMessage]:
- # Setup callbacks for asynchronous execution
- from langflow.processing.base import setup_callbacks
-
- callbacks = setup_callbacks(sync=False, trace_id=session_id, **kwargs)
-
- try:
- if has_external_output and hasattr(built_object, "astream"):
- # Asynchronous stream handling if supported and required
- output = ""
- async for chunk in built_object.astream(inputs, {"callbacks": callbacks}):
- output += chunk
- return output
- else:
- # Direct asynchronous invocation
- return await built_object.ainvoke(inputs, {"callbacks": callbacks})
- except Exception as async_exc:
- logger.debug(f"Async error, falling back to sync: {str(async_exc)}")
-
- # Setup synchronous callbacks for the fallback
- sync_callbacks = setup_callbacks(sync=True, trace_id=session_id, **kwargs)
- try:
- # Synchronous fallback if asynchronous execution fails
- if has_external_output and hasattr(built_object, "stream"):
- # Synchronous stream handling if supported and required
- output = ""
- for chunk in built_object.stream(inputs, {"callbacks": sync_callbacks}):
- output += chunk
- return output
- else:
- # Direct synchronous invocation
- return built_object.invoke(inputs, {"callbacks": sync_callbacks})
- except Exception as sync_exc:
- logger.error(f"Sync error after async failure: {str(sync_exc)}")
- # Handle or re-raise exception as appropriate for your application
- raise sync_exc from async_exc
-
-
-async def generate_result(built_object: Any, inputs: dict, has_external_output: bool, session_id: Optional[str] = None):
- # If the built_object is instance of Runnable
- # we can call `invoke` or `stream` on it
- # if it has_external_outputl, we need to call `stream` if it has it
- # if not, we call `invoke` if it has it
- if isinstance(built_object, Runnable):
- result = await invoke_lc_runnable(
- built_object=built_object, inputs=inputs, has_external_output=has_external_output, session_id=session_id
- )
- else:
- result = built_object
- return result
-
-
def build_clean_params(target: "Vertex") -> dict:
"""
Cleans the parameters of the target vertex.
diff --git a/src/backend/base/langflow/initial_setup/setup.py b/src/backend/base/langflow/initial_setup/setup.py
index 3c976cb94..3066e2909 100644
--- a/src/backend/base/langflow/initial_setup/setup.py
+++ b/src/backend/base/langflow/initial_setup/setup.py
@@ -159,7 +159,7 @@ def create_new_project(
project_data,
project_icon,
project_icon_bg_color,
- new_folder_id
+ new_folder_id,
):
logger.debug(f"Creating starter project {project_name}")
new_project = FlowCreate(
@@ -206,7 +206,7 @@ def create_starter_folder(session):
def create_or_update_starter_projects():
- components_paths = get_settings_service().settings.COMPONENTS_PATH
+ components_paths = get_settings_service().settings.components_path
try:
all_types_dict = get_all_components(components_paths, as_dict=True)
except Exception as e:
@@ -247,5 +247,5 @@ def create_or_update_starter_projects():
project_data,
project_icon,
project_icon_bg_color,
- new_folder.id
+ new_folder.id,
)
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json
index acaa87841..c56a8304c 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json
@@ -1,886 +1,800 @@
{
- "id": "c091a57f-43a7-4a5e-b352-035ae8d8379c",
- "data": {
- "nodes": [
- {
- "id": "Prompt-uxBqP",
- "type": "genericNode",
- "position": {
- "x": 53.588791333410654,
- "y": -107.07318910019967
- },
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: ",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "user_input": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "user_input",
- "display_name": "user_input",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "object",
- "str",
- "Text"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "user_input"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-uxBqP",
- "description": "Create a prompt template with dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": true,
- "width": 384,
- "height": 383,
- "dragging": false,
- "positionAbsolute": {
- "x": 53.588791333410654,
- "y": -107.07318910019967
- }
+ "id": "c091a57f-43a7-4a5e-b352-035ae8d8379c",
+ "data": {
+ "nodes": [
+ {
+ "id": "Prompt-uxBqP",
+ "type": "genericNode",
+ "position": {
+ "x": 53.588791333410654,
+ "y": -107.07318910019967
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: ",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "user_input": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "user_input",
+ "display_name": "user_input",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
},
- {
- "id": "OpenAIModel-k39HS",
- "type": "genericNode",
- "position": {
- "x": 634.8148772766217,
- "y": 27.035057029045305
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 256,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-3.5-turbo",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4o",
- "gpt-4-turbo",
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-3.5-turbo-0125"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": true,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": true,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 0.1,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-k39HS",
- "description": "Generates text using OpenAI LLMs.",
- "display_name": "OpenAI"
- },
- "selected": false,
- "width": 384,
- "height": 563,
- "positionAbsolute": {
- "x": 634.8148772766217,
- "y": 27.035057029045305
- },
- "dragging": false
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": ["object", "str", "Text"],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": ["user_input"]
},
- {
- "id": "ChatOutput-njtka",
- "type": "genericNode",
- "position": {
- "x": 1193.250417197867,
- "y": 71.88476890163852
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n files: Optional[list[str]] = None,\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n files=files,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "In case of Message being a Record, this template will be used to convert it to text.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "AI",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Playground.",
- "icon": "ChatOutput",
- "base_classes": [
- "Record",
- "Text",
- "str",
- "object"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null,
- "record_template": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-njtka"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "positionAbsolute": {
- "x": 1193.250417197867,
- "y": 71.88476890163852
- },
- "dragging": false
- },
- {
- "id": "ChatInput-P3fgL",
- "type": "genericNode",
- "position": {
- "x": -495.2223093083827,
- "y": -232.56998443685862
- },
- "data": {
- "type": "ChatInput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n return_record=return_record,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "value": "hi"
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Get chat inputs from the Playground.",
- "icon": "ChatInput",
- "base_classes": [
- "object",
- "Record",
- "str",
- "Text"
- ],
- "display_name": "Chat Input",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatInput-P3fgL"
- },
- "selected": false,
- "width": 384,
- "height": 375,
- "positionAbsolute": {
- "x": -495.2223093083827,
- "y": -232.56998443685862
- },
- "dragging": false
- }
- ],
- "edges": [
- {
- "source": "OpenAIModel-k39HS",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-k39HS\u0153}",
- "target": "ChatOutput-njtka",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-njtka\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-njtka",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-k39HS"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIModel-k39HS{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-k39HS\u0153}-ChatOutput-njtka{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-njtka\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- },
- {
- "source": "Prompt-uxBqP",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-uxBqP\u0153}",
- "target": "OpenAIModel-k39HS",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-k39HS\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-k39HS",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "str",
- "Text"
- ],
- "dataType": "Prompt",
- "id": "Prompt-uxBqP"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-Prompt-uxBqP{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-uxBqP\u0153}-OpenAIModel-k39HS{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-k39HS\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- },
- {
- "source": "ChatInput-P3fgL",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Record\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-P3fgL\u0153}",
- "target": "Prompt-uxBqP",
- "targetHandle": "{\u0153fieldName\u0153:\u0153user_input\u0153,\u0153id\u0153:\u0153Prompt-uxBqP\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "user_input",
- "id": "Prompt-uxBqP",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Record",
- "str",
- "Text"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-P3fgL"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-ChatInput-P3fgL{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Record\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-P3fgL\u0153}-Prompt-uxBqP{\u0153fieldName\u0153:\u0153user_input\u0153,\u0153id\u0153:\u0153Prompt-uxBqP\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- }
- ],
- "viewport": {
- "x": 260.58251815500563,
- "y": 318.2261172111936,
- "zoom": 0.43514115784696294
+ "output_types": ["Text"],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-uxBqP",
+ "description": "Create a prompt template with dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": true,
+ "width": 384,
+ "height": 383,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 53.588791333410654,
+ "y": -107.07318910019967
}
- },
- "description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. \nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ",
- "name": "Basic Prompting (Hello, World)",
- "last_tested_version": "1.0.0a4",
- "is_component": false
+ },
+ {
+ "id": "OpenAIModel-k39HS",
+ "type": "genericNode",
+ "position": {
+ "x": 634.8148772766217,
+ "y": 27.035057029045305
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-3.5-turbo",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 0.1,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": ["object", "Text", "str"],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-k39HS",
+ "description": "Generates text using OpenAI LLMs.",
+ "display_name": "OpenAI"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 563,
+ "positionAbsolute": {
+ "x": 634.8148772766217,
+ "y": 27.035057029045305
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-njtka",
+ "type": "genericNode",
+ "position": {
+ "x": 1193.250417197867,
+ "y": 71.88476890163852
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "In case of Message being a Record, this template will be used to convert it to text.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "AI",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Display a chat message in the Playground.",
+ "icon": "ChatOutput",
+ "base_classes": ["Record", "Text", "str", "object"],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null,
+ "record_template": null
+ },
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-njtka"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 1193.250417197867,
+ "y": 71.88476890163852
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatInput-P3fgL",
+ "type": "genericNode",
+ "position": {
+ "x": -495.2223093083827,
+ "y": -232.56998443685862
+ },
+ "data": {
+ "type": "ChatInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "value": "hi"
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Get chat inputs from the Playground.",
+ "icon": "ChatInput",
+ "base_classes": ["object", "Record", "str", "Text"],
+ "display_name": "Chat Input",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
+ },
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatInput-P3fgL"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 375,
+ "positionAbsolute": {
+ "x": -495.2223093083827,
+ "y": -232.56998443685862
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "OpenAIModel-k39HS",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-k39HS\u0153}",
+ "target": "ChatOutput-njtka",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-njtka\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-njtka",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-k39HS"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIModel-k39HS{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-k39HS\u0153}-ChatOutput-njtka{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-njtka\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "Prompt-uxBqP",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-uxBqP\u0153}",
+ "target": "OpenAIModel-k39HS",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-k39HS\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-k39HS",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "str", "Text"],
+ "dataType": "Prompt",
+ "id": "Prompt-uxBqP"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-Prompt-uxBqP{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-uxBqP\u0153}-OpenAIModel-k39HS{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-k39HS\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "ChatInput-P3fgL",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Record\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-P3fgL\u0153}",
+ "target": "Prompt-uxBqP",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153user_input\u0153,\u0153id\u0153:\u0153Prompt-uxBqP\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "user_input",
+ "id": "Prompt-uxBqP",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Record", "str", "Text"],
+ "dataType": "ChatInput",
+ "id": "ChatInput-P3fgL"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-ChatInput-P3fgL{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Record\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-P3fgL\u0153}-Prompt-uxBqP{\u0153fieldName\u0153:\u0153user_input\u0153,\u0153id\u0153:\u0153Prompt-uxBqP\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ }
+ ],
+ "viewport": {
+ "x": 260.58251815500563,
+ "y": 318.2261172111936,
+ "zoom": 0.43514115784696294
+ }
+ },
+ "description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. \nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ",
+ "name": "Basic Prompting (Hello, World)",
+ "last_tested_version": "1.0.0a4",
+ "is_component": false
}
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json
index 4ac890f8c..d2c8cf951 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json
@@ -1,1096 +1,987 @@
{
- "id": "6ad5559d-fb66-4fdc-8f98-96f4ac12799d",
- "data": {
- "nodes": [
- {
- "id": "Prompt-Rse03",
- "type": "genericNode",
- "position": {
- "x": 1331.381712783371,
- "y": 535.0279854229713
- },
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "Reference 1:\n\n{reference_1}\n\n---\n\nReference 2:\n\n{reference_2}\n\n---\n\n{instructions}\n\nBlog: \n\n\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "reference_1": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "reference_1",
- "display_name": "reference_1",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- },
- "reference_2": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "reference_2",
- "display_name": "reference_2",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- },
- "instructions": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "instructions",
- "display_name": "instructions",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "reference_1",
- "reference_2",
- "instructions"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-Rse03",
- "description": "Create a prompt template with dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": false,
- "width": 384,
- "height": 571,
- "dragging": false,
- "positionAbsolute": {
- "x": 1331.381712783371,
- "y": 535.0279854229713
- }
+ "id": "6ad5559d-fb66-4fdc-8f98-96f4ac12799d",
+ "data": {
+ "nodes": [
+ {
+ "id": "Prompt-Rse03",
+ "type": "genericNode",
+ "position": {
+ "x": 1331.381712783371,
+ "y": 535.0279854229713
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "Reference 1:\n\n{reference_1}\n\n---\n\nReference 2:\n\n{reference_2}\n\n---\n\n{instructions}\n\nBlog: \n\n\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "reference_1": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "reference_1",
+ "display_name": "reference_1",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ },
+ "reference_2": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "reference_2",
+ "display_name": "reference_2",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ },
+ "instructions": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "instructions",
+ "display_name": "instructions",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
},
- {
- "id": "URL-HYPkR",
- "type": "genericNode",
- "position": {
- "x": 568.2971412887712,
- "y": 700.9983368007821
- },
- "data": {
- "type": "URL",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "urls": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "urls",
- "display_name": "URL",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": [
- "https://www.promptingguide.ai/techniques/prompt_chaining"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Fetch content from one or more URLs.",
- "icon": "layout-template",
- "base_classes": [
- "Record"
- ],
- "display_name": "URL",
- "documentation": "",
- "custom_fields": {
- "urls": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "URL-HYPkR"
- },
- "selected": false,
- "width": 384,
- "height": 281,
- "positionAbsolute": {
- "x": 568.2971412887712,
- "y": 700.9983368007821
- },
- "dragging": false
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": ["object", "Text", "str"],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": ["reference_1", "reference_2", "instructions"]
},
- {
- "id": "ChatOutput-JPlxl",
- "type": "genericNode",
- "position": {
- "x": 2503.8617424688505,
- "y": 789.3005578928434
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n files: Optional[list[str]] = None,\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n files=files,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "In case of Message being a Record, this template will be used to convert it to text.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "AI",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Playground.",
- "icon": "ChatOutput",
- "base_classes": [
- "Text",
- "Record",
- "object",
- "str"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null,
- "record_template": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-JPlxl"
- },
- "selected": false,
- "width": 384,
- "height": 383
- },
- {
- "id": "OpenAIModel-gi29P",
- "type": "genericNode",
- "position": {
- "x": 1917.7089968570963,
- "y": 575.9186499244129
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "1024",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-3.5-turbo-0125",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4o",
- "gpt-4-turbo",
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-3.5-turbo-0125"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": true,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "0.1",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "str",
- "Text",
- "object"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-gi29P"
- },
- "selected": false,
- "width": 384,
- "height": 563,
- "positionAbsolute": {
- "x": 1917.7089968570963,
- "y": 575.9186499244129
- },
- "dragging": false
- },
- {
- "id": "URL-2cX90",
- "type": "genericNode",
- "position": {
- "x": 573.961301764604,
- "y": 336.41463436122086
- },
- "data": {
- "type": "URL",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "urls": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "urls",
- "display_name": "URL",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": [
- "https://www.promptingguide.ai/introduction/basics"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Fetch content from one or more URLs.",
- "icon": "layout-template",
- "base_classes": [
- "Record"
- ],
- "display_name": "URL",
- "documentation": "",
- "custom_fields": {
- "urls": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "URL-2cX90"
- },
- "selected": false,
- "width": 384,
- "height": 281,
- "positionAbsolute": {
- "x": 573.961301764604,
- "y": 336.41463436122086
- },
- "dragging": false
- },
- {
- "id": "TextInput-og8Or",
- "type": "genericNode",
- "position": {
- "x": 569.9387927203336,
- "y": 1095.3352160671316
- },
- "data": {
- "type": "TextInput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextInput(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as input.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Optional[str] = \"\",\n record_template: Optional[str] = \"\",\n ) -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "Use the references above for style to write a new blog/tutorial about prompt engineering techniques. Suggest non-covered topics.",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Value",
- "advanced": false,
- "input_types": [
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "Text or Record to be passed as input.",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Get text inputs from the Playground.",
- "icon": "type",
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "display_name": "Instructions",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "TextInput-og8Or"
- },
- "selected": false,
- "width": 384,
- "height": 289,
- "positionAbsolute": {
- "x": 569.9387927203336,
- "y": 1095.3352160671316
- },
- "dragging": false
- }
- ],
- "edges": [
- {
- "source": "URL-HYPkR",
- "target": "Prompt-Rse03",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153URL\u0153,\u0153id\u0153:\u0153URL-HYPkR\u0153}",
- "targetHandle": "{\u0153fieldName\u0153:\u0153reference_2\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "id": "reactflow__edge-URL-HYPkR{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153URL\u0153,\u0153id\u0153:\u0153URL-HYPkR\u0153}-Prompt-Rse03{\u0153fieldName\u0153:\u0153reference_2\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "reference_2",
- "id": "Prompt-Rse03",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "URL",
- "id": "URL-HYPkR"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
- },
- {
- "source": "OpenAIModel-gi29P",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-gi29P\u0153}",
- "target": "ChatOutput-JPlxl",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-JPlxl\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-JPlxl",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "Text",
- "object"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-gi29P"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIModel-gi29P{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-gi29P\u0153}-ChatOutput-JPlxl{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-JPlxl\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- },
- {
- "source": "URL-2cX90",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153URL\u0153,\u0153id\u0153:\u0153URL-2cX90\u0153}",
- "target": "Prompt-Rse03",
- "targetHandle": "{\u0153fieldName\u0153:\u0153reference_1\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "reference_1",
- "id": "Prompt-Rse03",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "URL",
- "id": "URL-2cX90"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-URL-2cX90{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153URL\u0153,\u0153id\u0153:\u0153URL-2cX90\u0153}-Prompt-Rse03{\u0153fieldName\u0153:\u0153reference_1\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- },
- {
- "source": "TextInput-og8Or",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextInput\u0153,\u0153id\u0153:\u0153TextInput-og8Or\u0153}",
- "target": "Prompt-Rse03",
- "targetHandle": "{\u0153fieldName\u0153:\u0153instructions\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "instructions",
- "id": "Prompt-Rse03",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "TextInput",
- "id": "TextInput-og8Or"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-TextInput-og8Or{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextInput\u0153,\u0153id\u0153:\u0153TextInput-og8Or\u0153}-Prompt-Rse03{\u0153fieldName\u0153:\u0153instructions\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- },
- {
- "source": "Prompt-Rse03",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153}",
- "target": "OpenAIModel-gi29P",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-gi29P\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-gi29P",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "Prompt",
- "id": "Prompt-Rse03"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-Prompt-Rse03{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153}-OpenAIModel-gi29P{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-gi29P\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "selected": false
- }
- ],
- "viewport": {
- "x": -214.14726025721177,
- "y": -35.83855793844168,
- "zoom": 0.47344308394045925
+ "output_types": ["Text"],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-Rse03",
+ "description": "Create a prompt template with dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 571,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 1331.381712783371,
+ "y": 535.0279854229713
}
- },
- "description": "This flow can be used to create a blog post following instructions from the user, using two other blogs as reference.",
- "name": "Blog Writer",
- "last_tested_version": "1.0.0a0",
- "is_component": false
+ },
+ {
+ "id": "URL-HYPkR",
+ "type": "genericNode",
+ "position": {
+ "x": 568.2971412887712,
+ "y": 700.9983368007821
+ },
+ "data": {
+ "type": "URL",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.custom import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "urls": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "urls",
+ "display_name": "URL",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": [
+ "https://www.promptingguide.ai/techniques/prompt_chaining"
+ ]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Fetch content from one or more URLs.",
+ "icon": "layout-template",
+ "base_classes": ["Record"],
+ "display_name": "URL",
+ "documentation": "",
+ "custom_fields": {
+ "urls": null
+ },
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "URL-HYPkR"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 281,
+ "positionAbsolute": {
+ "x": 568.2971412887712,
+ "y": 700.9983368007821
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-JPlxl",
+ "type": "genericNode",
+ "position": {
+ "x": 2503.8617424688505,
+ "y": 789.3005578928434
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "In case of Message being a Record, this template will be used to convert it to text.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "AI",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Display a chat message in the Playground.",
+ "icon": "ChatOutput",
+ "base_classes": ["Text", "Record", "object", "str"],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null,
+ "record_template": null
+ },
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-JPlxl"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383
+ },
+ {
+ "id": "OpenAIModel-gi29P",
+ "type": "genericNode",
+ "position": {
+ "x": 1917.7089968570963,
+ "y": 575.9186499244129
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "1024",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-3.5-turbo-0125",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "0.1",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": ["str", "Text", "object"],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-gi29P"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 563,
+ "positionAbsolute": {
+ "x": 1917.7089968570963,
+ "y": 575.9186499244129
+ },
+ "dragging": false
+ },
+ {
+ "id": "URL-2cX90",
+ "type": "genericNode",
+ "position": {
+ "x": 573.961301764604,
+ "y": 336.41463436122086
+ },
+ "data": {
+ "type": "URL",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.custom import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "urls": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "urls",
+ "display_name": "URL",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": ["https://www.promptingguide.ai/introduction/basics"]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Fetch content from one or more URLs.",
+ "icon": "layout-template",
+ "base_classes": ["Record"],
+ "display_name": "URL",
+ "documentation": "",
+ "custom_fields": {
+ "urls": null
+ },
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "URL-2cX90"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 281,
+ "positionAbsolute": {
+ "x": 573.961301764604,
+ "y": 336.41463436122086
+ },
+ "dragging": false
+ },
+ {
+ "id": "TextInput-og8Or",
+ "type": "genericNode",
+ "position": {
+ "x": 569.9387927203336,
+ "y": 1095.3352160671316
+ },
+ "data": {
+ "type": "TextInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextInput(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as input.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Optional[str] = \"\",\n record_template: Optional[str] = \"\",\n ) -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "Use the references above for style to write a new blog/tutorial about prompt engineering techniques. Suggest non-covered topics.",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": ["Record", "Text"],
+ "dynamic": false,
+ "info": "Text or Record to be passed as input.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Get text inputs from the Playground.",
+ "icon": "type",
+ "base_classes": ["object", "Text", "str"],
+ "display_name": "Instructions",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextInput-og8Or"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 289,
+ "positionAbsolute": {
+ "x": 569.9387927203336,
+ "y": 1095.3352160671316
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "URL-HYPkR",
+ "target": "Prompt-Rse03",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153URL\u0153,\u0153id\u0153:\u0153URL-HYPkR\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153reference_2\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-URL-HYPkR{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153URL\u0153,\u0153id\u0153:\u0153URL-HYPkR\u0153}-Prompt-Rse03{\u0153fieldName\u0153:\u0153reference_2\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "reference_2",
+ "id": "Prompt-Rse03",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "URL",
+ "id": "URL-HYPkR"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIModel-gi29P",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-gi29P\u0153}",
+ "target": "ChatOutput-JPlxl",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-JPlxl\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-JPlxl",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "Text", "object"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-gi29P"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIModel-gi29P{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-gi29P\u0153}-ChatOutput-JPlxl{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-JPlxl\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "URL-2cX90",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153URL\u0153,\u0153id\u0153:\u0153URL-2cX90\u0153}",
+ "target": "Prompt-Rse03",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153reference_1\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "reference_1",
+ "id": "Prompt-Rse03",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "URL",
+ "id": "URL-2cX90"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-URL-2cX90{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153URL\u0153,\u0153id\u0153:\u0153URL-2cX90\u0153}-Prompt-Rse03{\u0153fieldName\u0153:\u0153reference_1\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "TextInput-og8Or",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextInput\u0153,\u0153id\u0153:\u0153TextInput-og8Or\u0153}",
+ "target": "Prompt-Rse03",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153instructions\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "instructions",
+ "id": "Prompt-Rse03",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "TextInput",
+ "id": "TextInput-og8Or"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-TextInput-og8Or{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextInput\u0153,\u0153id\u0153:\u0153TextInput-og8Or\u0153}-Prompt-Rse03{\u0153fieldName\u0153:\u0153instructions\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "Prompt-Rse03",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153}",
+ "target": "OpenAIModel-gi29P",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-gi29P\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-gi29P",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "Prompt",
+ "id": "Prompt-Rse03"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-Prompt-Rse03{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-Rse03\u0153}-OpenAIModel-gi29P{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-gi29P\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "selected": false
+ }
+ ],
+ "viewport": {
+ "x": -214.14726025721177,
+ "y": -35.83855793844168,
+ "zoom": 0.47344308394045925
+ }
+ },
+ "description": "This flow can be used to create a blog post following instructions from the user, using two other blogs as reference.",
+ "name": "Blog Writer",
+ "last_tested_version": "1.0.0a0",
+ "is_component": false
}
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json
index 6e72838b8..32933e0d6 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json
@@ -1,1029 +1,933 @@
{
- "id": "fecbce42-6f11-454c-8ab2-db6eddbbbb0f",
- "data": {
- "nodes": [
- {
- "id": "Prompt-tHwPf",
- "type": "genericNode",
- "position": {
- "x": 585.7906101139403,
- "y": 117.52115876762832
- },
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "Answer user's questions based on the document below:\n\n---\n\n{Document}\n\n---\n\nQuestion:\n{Question}\n\nAnswer:\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "Document": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "Document",
- "display_name": "Document",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- },
- "Question": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "Question",
- "display_name": "Question",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "object",
- "str",
- "Text"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "Document",
- "Question"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-tHwPf",
- "description": "A component for creating prompt templates using dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": false,
- "width": 384,
- "height": 479,
- "positionAbsolute": {
- "x": 585.7906101139403,
- "y": 117.52115876762832
- },
- "dragging": false
+ "id": "fecbce42-6f11-454c-8ab2-db6eddbbbb0f",
+ "data": {
+ "nodes": [
+ {
+ "id": "Prompt-tHwPf",
+ "type": "genericNode",
+ "position": {
+ "x": 585.7906101139403,
+ "y": 117.52115876762832
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "Answer user's questions based on the document below:\n\n---\n\n{Document}\n\n---\n\nQuestion:\n{Question}\n\nAnswer:\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "Document": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "Document",
+ "display_name": "Document",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ },
+ "Question": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "Question",
+ "display_name": "Question",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
},
- {
- "id": "File-6TEsD",
- "type": "genericNode",
- "position": {
- "x": -18.636536329280602,
- "y": 3.951948774836353
- },
- "data": {
- "type": "File",
- "node": {
- "template": {
- "path": {
- "type": "file",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [
- ".txt",
- ".md",
- ".mdx",
- ".csv",
- ".json",
- ".yaml",
- ".yml",
- ".xml",
- ".html",
- ".htm",
- ".pdf",
- ".docx"
- ],
- "password": false,
- "name": "path",
- "display_name": "Path",
- "advanced": false,
- "dynamic": false,
- "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx",
- "load_from_db": false,
- "title_case": false,
- "value": ""
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"Files\"\n description = \"A generic file loader.\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "silent_errors": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "silent_errors",
- "display_name": "Silent Errors",
- "advanced": true,
- "dynamic": false,
- "info": "If true, errors will not raise an exception.",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "A generic file loader.",
- "base_classes": [
- "Record"
- ],
- "display_name": "Files",
- "documentation": "",
- "custom_fields": {
- "path": null,
- "silent_errors": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "File-6TEsD"
- },
- "selected": false,
- "width": 384,
- "height": 282,
- "positionAbsolute": {
- "x": -18.636536329280602,
- "y": 3.951948774836353
- },
- "dragging": false
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": ["object", "str", "Text"],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": ["Document", "Question"]
},
- {
- "id": "ChatInput-MsSJ9",
- "type": "genericNode",
- "position": {
- "x": -28.80036300619821,
- "y": 379.81180230285355
- },
- "data": {
- "type": "ChatInput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n return_record=return_record,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "value": ""
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Get chat inputs from the Playground.",
- "icon": "ChatInput",
- "base_classes": [
- "str",
- "Record",
- "Text",
- "object"
- ],
- "display_name": "Chat Input",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatInput-MsSJ9"
- },
- "selected": true,
- "width": 384,
- "height": 377,
- "positionAbsolute": {
- "x": -28.80036300619821,
- "y": 379.81180230285355
- },
- "dragging": false
+ "output_types": ["Text"],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-tHwPf",
+ "description": "A component for creating prompt templates using dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 479,
+ "positionAbsolute": {
+ "x": 585.7906101139403,
+ "y": 117.52115876762832
+ },
+ "dragging": false
+ },
+ {
+ "id": "File-6TEsD",
+ "type": "genericNode",
+ "position": {
+ "x": -18.636536329280602,
+ "y": 3.951948774836353
+ },
+ "data": {
+ "type": "File",
+ "node": {
+ "template": {
+ "path": {
+ "type": "file",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [
+ ".txt",
+ ".md",
+ ".mdx",
+ ".csv",
+ ".json",
+ ".yaml",
+ ".yml",
+ ".xml",
+ ".html",
+ ".htm",
+ ".pdf",
+ ".docx"
+ ],
+ "password": false,
+ "name": "path",
+ "display_name": "Path",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx",
+ "load_from_db": false,
+ "title_case": false,
+ "value": ""
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"Files\"\n description = \"A generic file loader.\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "silent_errors": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "silent_errors",
+ "display_name": "Silent Errors",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If true, errors will not raise an exception.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "ChatOutput-F5Awj",
- "type": "genericNode",
- "position": {
- "x": 1733.3012915204283,
- "y": 168.76098809939327
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n files: Optional[list[str]] = None,\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n files=files,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "AI",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Playground.",
- "icon": "ChatOutput",
- "base_classes": [
- "str",
- "Record",
- "Text",
- "object"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-F5Awj"
- },
- "selected": false,
- "width": 384,
- "height": 385,
- "positionAbsolute": {
- "x": 1733.3012915204283,
- "y": 168.76098809939327
- },
- "dragging": false
+ "description": "A generic file loader.",
+ "base_classes": ["Record"],
+ "display_name": "Files",
+ "documentation": "",
+ "custom_fields": {
+ "path": null,
+ "silent_errors": null
},
- {
- "id": "OpenAIModel-Bt067",
- "type": "genericNode",
- "position": {
- "x": 1137.6078582863759,
- "y": -14.41920034020356
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
-                "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 256,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-4-turbo-preview",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4o",
- "gpt-4-turbo",
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-3.5-turbo-0125"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": false,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 0.1,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "object",
- "str",
- "Text"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-Bt067"
- },
- "selected": false,
- "width": 384,
- "height": 642,
- "positionAbsolute": {
- "x": 1137.6078582863759,
- "y": -14.41920034020356
- },
- "dragging": false
- }
- ],
- "edges": [
- {
- "source": "ChatInput-MsSJ9",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Record\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-MsSJ9\u0153}",
- "target": "Prompt-tHwPf",
- "targetHandle": "{\u0153fieldName\u0153:\u0153Question\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "Question",
- "id": "Prompt-tHwPf",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "Record",
- "Text",
- "object"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-MsSJ9"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-ChatInput-MsSJ9{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Record\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-MsSJ9\u0153}-Prompt-tHwPf{\u0153fieldName\u0153:\u0153Question\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "File-6TEsD"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 282,
+ "positionAbsolute": {
+ "x": -18.636536329280602,
+ "y": 3.951948774836353
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatInput-MsSJ9",
+ "type": "genericNode",
+ "position": {
+ "x": -28.80036300619821,
+ "y": 379.81180230285355
+ },
+ "data": {
+ "type": "ChatInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "value": ""
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "File-6TEsD",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-6TEsD\u0153}",
- "target": "Prompt-tHwPf",
- "targetHandle": "{\u0153fieldName\u0153:\u0153Document\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "Document",
- "id": "Prompt-tHwPf",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "File",
- "id": "File-6TEsD"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-File-6TEsD{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-6TEsD\u0153}-Prompt-tHwPf{\u0153fieldName\u0153:\u0153Document\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "description": "Get chat inputs from the Playground.",
+ "icon": "ChatInput",
+ "base_classes": ["str", "Record", "Text", "object"],
+ "display_name": "Chat Input",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
},
- {
- "source": "Prompt-tHwPf",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153}",
- "target": "OpenAIModel-Bt067",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-Bt067\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-Bt067",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "str",
- "Text"
- ],
- "dataType": "Prompt",
- "id": "Prompt-tHwPf"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-Prompt-tHwPf{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153}-OpenAIModel-Bt067{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-Bt067\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatInput-MsSJ9"
+ },
+ "selected": true,
+ "width": 384,
+ "height": 377,
+ "positionAbsolute": {
+ "x": -28.80036300619821,
+ "y": 379.81180230285355
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-F5Awj",
+ "type": "genericNode",
+ "position": {
+ "x": 1733.3012915204283,
+ "y": 168.76098809939327
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "AI",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIModel-Bt067",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-Bt067\u0153}",
- "target": "ChatOutput-F5Awj",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-F5Awj\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-F5Awj",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "str",
- "Text"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-Bt067"
- }
+ "description": "Display a chat message in the Playground.",
+ "icon": "ChatOutput",
+ "base_classes": ["str", "Record", "Text", "object"],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
+ },
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-F5Awj"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 385,
+ "positionAbsolute": {
+ "x": 1733.3012915204283,
+ "y": 168.76098809939327
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIModel-Bt067",
+ "type": "genericNode",
+ "position": {
+ "x": 1137.6078582863759,
+ "y": -14.41920034020356
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-4-turbo-preview",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 0.1,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
},
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIModel-Bt067{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-Bt067\u0153}-ChatOutput-F5Awj{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-F5Awj\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- }
- ],
- "viewport": {
- "x": 352.20899206064655,
- "y": 56.054900898593075,
- "zoom": 0.9023391400011
- }
- },
- "description": "This flow integrates PDF reading with a language model to answer document-specific questions. Ideal for small-scale texts, it facilitates direct queries with immediate insights.",
- "name": "Document QA",
- "last_tested_version": "1.0.0a0",
- "is_component": false
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": ["object", "str", "Text"],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-Bt067"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 642,
+ "positionAbsolute": {
+ "x": 1137.6078582863759,
+ "y": -14.41920034020356
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "ChatInput-MsSJ9",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Record\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-MsSJ9\u0153}",
+ "target": "Prompt-tHwPf",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153Question\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "Question",
+ "id": "Prompt-tHwPf",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "Record", "Text", "object"],
+ "dataType": "ChatInput",
+ "id": "ChatInput-MsSJ9"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-ChatInput-MsSJ9{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Record\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-MsSJ9\u0153}-Prompt-tHwPf{\u0153fieldName\u0153:\u0153Question\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "File-6TEsD",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-6TEsD\u0153}",
+ "target": "Prompt-tHwPf",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153Document\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "Document",
+ "id": "Prompt-tHwPf",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "File",
+ "id": "File-6TEsD"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-File-6TEsD{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-6TEsD\u0153}-Prompt-tHwPf{\u0153fieldName\u0153:\u0153Document\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "Prompt-tHwPf",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153}",
+ "target": "OpenAIModel-Bt067",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-Bt067\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-Bt067",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "str", "Text"],
+ "dataType": "Prompt",
+ "id": "Prompt-tHwPf"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-Prompt-tHwPf{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-tHwPf\u0153}-OpenAIModel-Bt067{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-Bt067\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "OpenAIModel-Bt067",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-Bt067\u0153}",
+ "target": "ChatOutput-F5Awj",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-F5Awj\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-F5Awj",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "str", "Text"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-Bt067"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIModel-Bt067{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-Bt067\u0153}-ChatOutput-F5Awj{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-F5Awj\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ }
+ ],
+ "viewport": {
+ "x": 352.20899206064655,
+ "y": 56.054900898593075,
+ "zoom": 0.9023391400011
+ }
+ },
+ "description": "This flow integrates PDF reading with a language model to answer document-specific questions. Ideal for small-scale texts, it facilitates direct queries with immediate insights.",
+ "name": "Document QA",
+ "last_tested_version": "1.0.0a0",
+ "is_component": false
}
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json
index 81f8eefb0..9e51846be 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json
@@ -1,1272 +1,1137 @@
{
- "id": "08d5cccf-d098-4367-b14b-1078429c9ed9",
- "icon": "\ud83e\udd16",
- "icon_bg_color": "#FFD700",
- "data": {
- "nodes": [
- {
- "id": "ChatInput-t7F8v",
- "type": "genericNode",
- "position": {
- "x": 1283.2700598313072,
- "y": 982.5953650473145
- },
- "data": {
- "type": "ChatInput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n return_record=return_record,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "value": ""
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": false,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": "MySessionID"
- },
- "_type": "CustomComponent"
- },
- "description": "Get chat inputs from the Playground.",
- "icon": "ChatInput",
- "base_classes": [
- "Text",
- "object",
- "Record",
- "str"
- ],
- "display_name": "Chat Input",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatInput-t7F8v"
- },
- "selected": false,
- "width": 384,
- "height": 469,
- "positionAbsolute": {
- "x": 1283.2700598313072,
- "y": 982.5953650473145
- },
- "dragging": false
+ "id": "08d5cccf-d098-4367-b14b-1078429c9ed9",
+ "icon": "\ud83e\udd16",
+ "icon_bg_color": "#FFD700",
+ "data": {
+ "nodes": [
+ {
+ "id": "ChatInput-t7F8v",
+ "type": "genericNode",
+ "position": {
+ "x": 1283.2700598313072,
+ "y": 982.5953650473145
+ },
+ "data": {
+ "type": "ChatInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "value": ""
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": false,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "MySessionID"
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "ChatOutput-P1jEe",
- "type": "genericNode",
- "position": {
- "x": 3154.916355514023,
- "y": 851.051882666333
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n files: Optional[list[str]] = None,\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n files=files,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "AI",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": false,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": "MySessionID"
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Playground.",
- "icon": "ChatOutput",
- "base_classes": [
- "Text",
- "object",
- "Record",
- "str"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-P1jEe"
- },
- "selected": false,
- "width": 384,
- "height": 477,
- "dragging": false,
- "positionAbsolute": {
- "x": 3154.916355514023,
- "y": 851.051882666333
- }
+ "description": "Get chat inputs from the Playground.",
+ "icon": "ChatInput",
+ "base_classes": ["Text", "object", "Record", "str"],
+ "display_name": "Chat Input",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
},
- {
- "id": "MemoryComponent-cdA1J",
- "type": "genericNode",
- "position": {
- "x": 1289.9606870058817,
- "y": 442.16804561053766
- },
- "data": {
- "type": "MemoryComponent",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.memory.memory import BaseMemoryComponent\nfrom langflow.field_typing import Text\nfrom langflow.helpers.record import records_to_text\nfrom langflow.memory import get_messages\nfrom langflow.schema.schema import Record\n\n\nclass MemoryComponent(BaseMemoryComponent):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages given a specific Session ID.\"\n beta: bool = True\n icon = \"history\"\n\n def build_config(self):\n return {\n \"sender\": {\n \"options\": [\"Machine\", \"User\", \"Machine and User\"],\n \"display_name\": \"Sender Type\",\n },\n \"sender_name\": {\"display_name\": \"Sender Name\", \"advanced\": True},\n \"n_messages\": {\n \"display_name\": \"Number of Messages\",\n \"info\": \"Number of messages to retrieve.\",\n },\n \"session_id\": {\n \"display_name\": \"Session ID\",\n \"info\": \"Session ID of the chat history.\",\n \"input_types\": [\"Text\"],\n },\n \"order\": {\n \"options\": [\"Ascending\", \"Descending\"],\n \"display_name\": \"Order\",\n \"info\": \"Order of the messages.\",\n \"advanced\": True,\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def get_messages(self, **kwargs) -> list[Record]:\n # Validate kwargs by checking if it contains the correct keys\n if \"sender\" not in kwargs:\n kwargs[\"sender\"] = None\n if \"sender_name\" not in kwargs:\n kwargs[\"sender_name\"] = None\n if \"session_id\" not in kwargs:\n kwargs[\"session_id\"] = None\n if \"limit\" not in kwargs:\n kwargs[\"limit\"] = 5\n if \"order\" not in kwargs:\n kwargs[\"order\"] = \"Descending\"\n\n kwargs[\"order\"] = \"DESC\" if kwargs[\"order\"] == \"Descending\" else \"ASC\"\n if kwargs[\"sender\"] == \"Machine and User\":\n kwargs[\"sender\"] = None\n return get_messages(**kwargs)\n\n def build(\n self,\n sender: Optional[str] = \"Machine and User\",\n sender_name: Optional[str] = None,\n session_id: Optional[str] = None,\n n_messages: int = 5,\n order: Optional[str] = \"Descending\",\n record_template: Optional[str] = \"{sender_name}: {text}\",\n ) -> Text:\n messages = self.get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n messages_str = records_to_text(template=record_template or \"\", records=messages)\n self.status = messages_str\n return messages_str\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "n_messages": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 5,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "n_messages",
- "display_name": "Number of Messages",
- "advanced": false,
- "dynamic": false,
- "info": "Number of messages to retrieve.",
- "load_from_db": false,
- "title_case": false
- },
- "order": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Descending",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Ascending",
- "Descending"
- ],
- "name": "order",
- "display_name": "Order",
- "advanced": true,
- "dynamic": false,
- "info": "Order of the messages.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{sender_name}: {text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine and User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User",
- "Machine and User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "Session ID of the chat history.",
- "load_from_db": false,
- "title_case": false,
- "value": "MySessionID"
- },
- "_type": "CustomComponent"
- },
- "description": "Retrieves stored chat messages given a specific Session ID.",
- "icon": "history",
- "base_classes": [
- "str",
- "Text",
- "object"
- ],
- "display_name": "Chat Memory",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "session_id": null,
- "n_messages": null,
- "order": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": true
- },
- "id": "MemoryComponent-cdA1J",
- "description": "Retrieves stored chat messages given a specific Session ID.",
- "display_name": "Chat Memory"
- },
- "selected": false,
- "width": 384,
- "height": 489,
- "dragging": false,
- "positionAbsolute": {
- "x": 1289.9606870058817,
- "y": 442.16804561053766
- }
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatInput-t7F8v"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 469,
+ "positionAbsolute": {
+ "x": 1283.2700598313072,
+ "y": 982.5953650473145
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-P1jEe",
+ "type": "genericNode",
+ "position": {
+ "x": 3154.916355514023,
+ "y": 851.051882666333
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "AI",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": false,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "MySessionID"
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "Prompt-ODkUx",
- "type": "genericNode",
- "position": {
- "x": 1894.594426342426,
- "y": 753.3797365481901
- },
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "{context}\n\nUser: {user_message}\nAI: ",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "context": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "context",
- "display_name": "context",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- },
- "user_message": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "user_message",
- "display_name": "user_message",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "Text",
- "str",
- "object"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "context",
- "user_message"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-ODkUx",
- "description": "A component for creating prompt templates using dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": false,
- "width": 384,
- "height": 477,
- "dragging": false,
- "positionAbsolute": {
- "x": 1894.594426342426,
- "y": 753.3797365481901
- }
+ "description": "Display a chat message in the Playground.",
+ "icon": "ChatOutput",
+ "base_classes": ["Text", "object", "Record", "str"],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
},
- {
- "id": "OpenAIModel-9RykF",
- "type": "genericNode",
- "position": {
- "x": 2561.5850334731617,
- "y": 553.2745131130916
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 256,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-4-1106-preview",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4o",
- "gpt-4-turbo",
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-3.5-turbo-0125"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": true,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "0.2",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "str",
- "object",
- "Text"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-9RykF"
- },
- "selected": false,
- "width": 384,
- "height": 563,
- "positionAbsolute": {
- "x": 2561.5850334731617,
- "y": 553.2745131130916
- },
- "dragging": false
- },
- {
- "id": "TextOutput-vrs6T",
- "type": "genericNode",
- "position": {
- "x": 1911.4785906252087,
- "y": 247.39079954376987
- },
- "data": {
- "type": "TextOutput",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Value",
- "advanced": false,
- "input_types": [
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "Text or Record to be passed as output.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a text output in the Playground.",
- "icon": "type",
- "base_classes": [
- "str",
- "object",
- "Text"
- ],
- "display_name": "Inspect Memory",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "TextOutput-vrs6T"
- },
- "selected": false,
- "width": 384,
- "height": 289,
- "positionAbsolute": {
- "x": 1911.4785906252087,
- "y": 247.39079954376987
- },
- "dragging": false
- }
- ],
- "edges": [
- {
- "source": "MemoryComponent-cdA1J",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153MemoryComponent\u0153,\u0153id\u0153:\u0153MemoryComponent-cdA1J\u0153}",
- "target": "Prompt-ODkUx",
- "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "context",
- "type": "str",
- "id": "Prompt-ODkUx",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ]
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "Text",
- "object"
- ],
- "dataType": "MemoryComponent",
- "id": "MemoryComponent-cdA1J"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-MemoryComponent-cdA1J{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153MemoryComponent\u0153,\u0153id\u0153:\u0153MemoryComponent-cdA1J\u0153}-Prompt-ODkUx{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "selected": false
- },
- {
- "source": "ChatInput-t7F8v",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153object\u0153,\u0153Record\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-t7F8v\u0153}",
- "target": "Prompt-ODkUx",
- "targetHandle": "{\u0153fieldName\u0153:\u0153user_message\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "user_message",
- "type": "str",
- "id": "Prompt-ODkUx",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ]
- },
- "sourceHandle": {
- "baseClasses": [
- "Text",
- "object",
- "Record",
- "str"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-t7F8v"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-ChatInput-t7F8v{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153object\u0153,\u0153Record\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-t7F8v\u0153}-Prompt-ODkUx{\u0153fieldName\u0153:\u0153user_message\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "selected": false
- },
- {
- "source": "Prompt-ODkUx",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153}",
- "target": "OpenAIModel-9RykF",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-9RykF\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-9RykF",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Text",
- "str",
- "object"
- ],
- "dataType": "Prompt",
- "id": "Prompt-ODkUx"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-Prompt-ODkUx{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153}-OpenAIModel-9RykF{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-9RykF\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- },
- {
- "source": "OpenAIModel-9RykF",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153object\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-9RykF\u0153}",
- "target": "ChatOutput-P1jEe",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-P1jEe\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-P1jEe",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "object",
- "Text"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-9RykF"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIModel-9RykF{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153object\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-9RykF\u0153}-ChatOutput-P1jEe{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-P1jEe\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- },
- {
- "source": "MemoryComponent-cdA1J",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153MemoryComponent\u0153,\u0153id\u0153:\u0153MemoryComponent-cdA1J\u0153}",
- "target": "TextOutput-vrs6T",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-vrs6T\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "TextOutput-vrs6T",
- "inputTypes": [
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "Text",
- "object"
- ],
- "dataType": "MemoryComponent",
- "id": "MemoryComponent-cdA1J"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-foreground stroke-connection",
- "id": "reactflow__edge-MemoryComponent-cdA1J{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153MemoryComponent\u0153,\u0153id\u0153:\u0153MemoryComponent-cdA1J\u0153}-TextOutput-vrs6T{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-vrs6T\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- }
- ],
- "viewport": {
- "x": -569.862554459756,
- "y": -42.08339711050985,
- "zoom": 0.4868590524514978
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-P1jEe"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 477,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 3154.916355514023,
+ "y": 851.051882666333
}
- },
- "description": "This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.",
- "name": "Memory Chatbot",
- "last_tested_version": "1.0.0a0",
- "is_component": false
+ },
+ {
+ "id": "MemoryComponent-cdA1J",
+ "type": "genericNode",
+ "position": {
+ "x": 1289.9606870058817,
+ "y": 442.16804561053766
+ },
+ "data": {
+ "type": "MemoryComponent",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.memory.memory import BaseMemoryComponent\nfrom langflow.field_typing import Text\nfrom langflow.helpers.record import records_to_text\nfrom langflow.memory import get_messages\nfrom langflow.schema.schema import Record\n\n\nclass MemoryComponent(BaseMemoryComponent):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages given a specific Session ID.\"\n beta: bool = True\n icon = \"history\"\n\n def build_config(self):\n return {\n \"sender\": {\n \"options\": [\"Machine\", \"User\", \"Machine and User\"],\n \"display_name\": \"Sender Type\",\n },\n \"sender_name\": {\"display_name\": \"Sender Name\", \"advanced\": True},\n \"n_messages\": {\n \"display_name\": \"Number of Messages\",\n \"info\": \"Number of messages to retrieve.\",\n },\n \"session_id\": {\n \"display_name\": \"Session ID\",\n \"info\": \"Session ID of the chat history.\",\n \"input_types\": [\"Text\"],\n },\n \"order\": {\n \"options\": [\"Ascending\", \"Descending\"],\n \"display_name\": \"Order\",\n \"info\": \"Order of the messages.\",\n \"advanced\": True,\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def get_messages(self, **kwargs) -> list[Record]:\n # Validate kwargs by checking if it contains the correct keys\n if \"sender\" not in kwargs:\n kwargs[\"sender\"] = None\n if \"sender_name\" not in kwargs:\n kwargs[\"sender_name\"] = None\n if \"session_id\" not in kwargs:\n kwargs[\"session_id\"] = None\n if \"limit\" not in kwargs:\n kwargs[\"limit\"] = 5\n if \"order\" not in kwargs:\n kwargs[\"order\"] = \"Descending\"\n\n kwargs[\"order\"] = \"DESC\" if kwargs[\"order\"] == \"Descending\" else \"ASC\"\n if kwargs[\"sender\"] == \"Machine and User\":\n kwargs[\"sender\"] = None\n return get_messages(**kwargs)\n\n def build(\n self,\n sender: Optional[str] = \"Machine and User\",\n sender_name: Optional[str] = None,\n session_id: Optional[str] = None,\n n_messages: int = 5,\n order: Optional[str] = \"Descending\",\n record_template: Optional[str] = \"{sender_name}: {text}\",\n ) -> Text:\n messages = self.get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n messages_str = records_to_text(template=record_template or \"\", records=messages)\n self.status = messages_str\n return messages_str\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "n_messages": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 5,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "n_messages",
+ "display_name": "Number of Messages",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Number of messages to retrieve.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "order": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Descending",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Ascending", "Descending"],
+ "name": "order",
+ "display_name": "Order",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Order of the messages.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{sender_name}: {text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine and User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User", "Machine and User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "Session ID of the chat history.",
+ "load_from_db": false,
+ "title_case": false,
+ "value": "MySessionID"
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Retrieves stored chat messages given a specific Session ID.",
+ "icon": "history",
+ "base_classes": ["str", "Text", "object"],
+ "display_name": "Chat Memory",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "session_id": null,
+ "n_messages": null,
+ "order": null,
+ "record_template": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": true
+ },
+ "id": "MemoryComponent-cdA1J",
+ "description": "Retrieves stored chat messages given a specific Session ID.",
+ "display_name": "Chat Memory"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 489,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 1289.9606870058817,
+ "y": 442.16804561053766
+ }
+ },
+ {
+ "id": "Prompt-ODkUx",
+ "type": "genericNode",
+ "position": {
+ "x": 1894.594426342426,
+ "y": 753.3797365481901
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "{context}\n\nUser: {user_message}\nAI: ",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "context": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "context",
+ "display_name": "context",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ },
+ "user_message": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "user_message",
+ "display_name": "user_message",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
+ },
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": ["Text", "str", "object"],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": ["context", "user_message"]
+ },
+ "output_types": ["Text"],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-ODkUx",
+ "description": "A component for creating prompt templates using dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 477,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 1894.594426342426,
+ "y": 753.3797365481901
+ }
+ },
+ {
+ "id": "OpenAIModel-9RykF",
+ "type": "genericNode",
+ "position": {
+ "x": 2561.5850334731617,
+ "y": 553.2745131130916
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-4-1106-preview",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "0.2",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": ["str", "object", "Text"],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-9RykF"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 563,
+ "positionAbsolute": {
+ "x": 2561.5850334731617,
+ "y": 553.2745131130916
+ },
+ "dragging": false
+ },
+ {
+ "id": "TextOutput-vrs6T",
+ "type": "genericNode",
+ "position": {
+ "x": 1911.4785906252087,
+ "y": 247.39079954376987
+ },
+ "data": {
+ "type": "TextOutput",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": ["Record", "Text"],
+ "dynamic": false,
+ "info": "Text or Record to be passed as output.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Display a text output in the Playground.",
+ "icon": "type",
+ "base_classes": ["str", "object", "Text"],
+ "display_name": "Inspect Memory",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextOutput-vrs6T"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 289,
+ "positionAbsolute": {
+ "x": 1911.4785906252087,
+ "y": 247.39079954376987
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "MemoryComponent-cdA1J",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153MemoryComponent\u0153,\u0153id\u0153:\u0153MemoryComponent-cdA1J\u0153}",
+ "target": "Prompt-ODkUx",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "context",
+ "type": "str",
+ "id": "Prompt-ODkUx",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"]
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "Text", "object"],
+ "dataType": "MemoryComponent",
+ "id": "MemoryComponent-cdA1J"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-MemoryComponent-cdA1J{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153MemoryComponent\u0153,\u0153id\u0153:\u0153MemoryComponent-cdA1J\u0153}-Prompt-ODkUx{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "selected": false
+ },
+ {
+ "source": "ChatInput-t7F8v",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153object\u0153,\u0153Record\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-t7F8v\u0153}",
+ "target": "Prompt-ODkUx",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153user_message\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "user_message",
+ "type": "str",
+ "id": "Prompt-ODkUx",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"]
+ },
+ "sourceHandle": {
+ "baseClasses": ["Text", "object", "Record", "str"],
+ "dataType": "ChatInput",
+ "id": "ChatInput-t7F8v"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-ChatInput-t7F8v{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153object\u0153,\u0153Record\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-t7F8v\u0153}-Prompt-ODkUx{\u0153fieldName\u0153:\u0153user_message\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "selected": false
+ },
+ {
+ "source": "Prompt-ODkUx",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153}",
+ "target": "OpenAIModel-9RykF",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-9RykF\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-9RykF",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Text", "str", "object"],
+ "dataType": "Prompt",
+ "id": "Prompt-ODkUx"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-Prompt-ODkUx{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-ODkUx\u0153}-OpenAIModel-9RykF{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-9RykF\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "OpenAIModel-9RykF",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153object\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-9RykF\u0153}",
+ "target": "ChatOutput-P1jEe",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-P1jEe\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-P1jEe",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "object", "Text"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-9RykF"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIModel-9RykF{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153object\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-9RykF\u0153}-ChatOutput-P1jEe{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-P1jEe\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "MemoryComponent-cdA1J",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153MemoryComponent\u0153,\u0153id\u0153:\u0153MemoryComponent-cdA1J\u0153}",
+ "target": "TextOutput-vrs6T",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-vrs6T\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "TextOutput-vrs6T",
+ "inputTypes": ["Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "Text", "object"],
+ "dataType": "MemoryComponent",
+ "id": "MemoryComponent-cdA1J"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-foreground stroke-connection",
+ "id": "reactflow__edge-MemoryComponent-cdA1J{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153MemoryComponent\u0153,\u0153id\u0153:\u0153MemoryComponent-cdA1J\u0153}-TextOutput-vrs6T{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-vrs6T\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ }
+ ],
+ "viewport": {
+ "x": -569.862554459756,
+ "y": -42.08339711050985,
+ "zoom": 0.4868590524514978
+ }
+ },
+ "description": "This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.",
+ "name": "Memory Chatbot",
+ "last_tested_version": "1.0.0a0",
+ "is_component": false
}
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json
index 0d31698d0..9269eeed0 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json
@@ -1,1769 +1,1586 @@
{
- "id": "85392e54-20f3-4ab5-a179-cb4bef16f639",
- "data": {
- "nodes": [
- {
- "id": "Prompt-amqBu",
- "type": "genericNode",
- "position": {
- "x": 2191.5837146441663,
- "y": 1047.9307944451873
- },
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "You are a helpful assistant. Given a long document, your task is to create a concise summary that captures the main points and key details. The summary should be clear, accurate, and succinct. Please provide the summary in the format below:\n####\n{document}\n####\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "document": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "document",
- "display_name": "document",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "object",
- "str",
- "Text"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "document"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-amqBu",
- "description": "Create a prompt template with dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": false,
- "width": 384,
- "height": 385,
- "positionAbsolute": {
- "x": 2191.5837146441663,
- "y": 1047.9307944451873
- },
- "dragging": false
+ "id": "85392e54-20f3-4ab5-a179-cb4bef16f639",
+ "data": {
+ "nodes": [
+ {
+ "id": "Prompt-amqBu",
+ "type": "genericNode",
+ "position": {
+ "x": 2191.5837146441663,
+ "y": 1047.9307944451873
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "You are a helpful assistant. Given a long document, your task is to create a concise summary that captures the main points and key details. The summary should be clear, accurate, and succinct. Please provide the summary in the format below:\n####\n{document}\n####\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "document": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "document",
+ "display_name": "document",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
},
- {
- "id": "Prompt-gTNiz",
- "type": "genericNode",
- "position": {
- "x": 3731.0813766902447,
- "y": 799.631909121391
- },
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "Given a summary of an article, please create two multiple-choice questions that cover the key points and details mentioned. Ensure the questions are clear and provide three options (A, B, C), with one correct answer.\n####\n{summary}\n####",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "summary": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "summary",
- "display_name": "summary",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "object",
- "str",
- "Text"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "summary"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-gTNiz",
- "description": "Create a prompt template with dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": false,
- "width": 384,
- "height": 385,
- "dragging": false
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": ["object", "str", "Text"],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": ["document"]
},
- {
- "id": "ChatOutput-EJkG3",
- "type": "genericNode",
- "position": {
- "x": 3722.1747844849388,
- "y": 1283.413553222214
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n files: Optional[list[str]] = None,\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n files=files,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "In case of Message being a Record, this template will be used to convert it to text.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "Summarizer",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Playground.",
- "icon": "ChatOutput",
- "base_classes": [
- "object",
- "Record",
- "Text",
- "str"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null,
- "record_template": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-EJkG3"
- },
- "selected": false,
- "width": 384,
- "height": 385,
- "dragging": false
+ "output_types": ["Text"],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-amqBu",
+ "description": "Create a prompt template with dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 385,
+ "positionAbsolute": {
+ "x": 2191.5837146441663,
+ "y": 1047.9307944451873
+ },
+ "dragging": false
+ },
+ {
+ "id": "Prompt-gTNiz",
+ "type": "genericNode",
+ "position": {
+ "x": 3731.0813766902447,
+ "y": 799.631909121391
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "Given a summary of an article, please create two multiple-choice questions that cover the key points and details mentioned. Ensure the questions are clear and provide three options (A, B, C), with one correct answer.\n####\n{summary}\n####",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "summary": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "summary",
+ "display_name": "summary",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
},
- {
- "id": "ChatOutput-DNmvg",
- "type": "genericNode",
- "position": {
- "x": 5077.71285886074,
- "y": 1232.9152769735522
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n files: Optional[list[str]] = None,\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n files=files,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "In case of Message being a Record, this template will be used to convert it to text.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "Question Generator",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Playground.",
- "icon": "ChatOutput",
- "base_classes": [
- "object",
- "Record",
- "Text",
- "str"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null,
- "record_template": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-DNmvg"
- },
- "selected": false,
- "width": 384,
- "height": 385
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": ["object", "str", "Text"],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": ["summary"]
},
- {
- "id": "TextInput-sptaH",
- "type": "genericNode",
- "position": {
- "x": 1700.5624822024752,
- "y": 1039.603088937466
- },
- "data": {
- "type": "TextInput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextInput(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as input.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Optional[Text] = \"\",\n record_template: Optional[str] = \"\",\n ) -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "Revolutionary Nano-Battery Technology Unveiled In a groundbreaking announcement yesterday, researchers from the fictional Tech Innovations Institute revealed the development of a new nano-battery technology that promises to revolutionize energy storage. The new battery, dubbed the \"EnerGCell\", uses advanced nanomaterials to achieve unprecedented efficiency and storage capacities. According to lead researcher Dr. Ada Byron, the EnerGCell can store up to ten times more energy than the best lithium-ion batteries available today, while charging in just a fraction of the time. \"We're talking about charging your electric vehicle in just five minutes for a range of over 1,000 miles,\" Dr. Byron stated during the press conference. The technology behind the EnerGCell involves a complex arrangement of nanostructured electrodes that allow for rapid ion transfer and extremely high energy density. This breakthrough was achieved after a decade of research into nanomaterials and their applications in energy storage. The implications of this technology are vast, promising to accelerate the adoption of renewable energy by making it more practical and affordable to store wind and solar power. It could also lead to significant advancements in electric vehicles, mobile devices, and any other technology that relies on batteries. Despite the excitement, some experts are calling for patience, noting that the EnerGCell is still in its early stages of development and may take several years before it's commercially available. However, the potential impact of such a technology on the environment and the global economy is undeniable. Tech Innovations Institute plans to continue refining the EnerGCell and begin pilot projects with select partners in the coming year. If successful, this nano-battery technology could indeed be the breakthrough needed to usher in a new era of clean energy and technology.",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Value",
- "advanced": false,
- "input_types": [
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "Text or Record to be passed as input.",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Get text inputs from the Playground.",
- "icon": "type",
- "base_classes": [
- "str",
- "Text",
- "object"
- ],
- "display_name": "Text Input",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "TextInput-sptaH"
- },
- "selected": false,
- "width": 384,
- "height": 290,
- "positionAbsolute": {
- "x": 1700.5624822024752,
- "y": 1039.603088937466
- },
- "dragging": false
+ "output_types": ["Text"],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-gTNiz",
+ "description": "Create a prompt template with dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 385,
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-EJkG3",
+ "type": "genericNode",
+ "position": {
+ "x": 3722.1747844849388,
+ "y": 1283.413553222214
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "In case of Message being a Record, this template will be used to convert it to text.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "Summarizer",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "TextOutput-2MS4a",
- "type": "genericNode",
- "position": {
- "x": 2917.216113690115,
- "y": 513.0058511435552
- },
- "data": {
- "type": "TextOutput",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Value",
- "advanced": false,
- "input_types": [
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "Text or Record to be passed as output.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a text output in the Playground.",
- "icon": "type",
- "base_classes": [
- "str",
- "Text",
- "object"
- ],
- "display_name": "First Prompt",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "TextOutput-2MS4a"
- },
- "selected": false,
- "width": 384,
- "height": 290,
- "positionAbsolute": {
- "x": 2917.216113690115,
- "y": 513.0058511435552
- },
- "dragging": false
+ "description": "Display a chat message in the Playground.",
+ "icon": "ChatOutput",
+ "base_classes": ["object", "Record", "Text", "str"],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null,
+ "record_template": null
},
- {
- "id": "OpenAIModel-uYXZJ",
- "type": "genericNode",
- "position": {
- "x": 2925.784767523062,
- "y": 933.6465680967775
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 256,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-4-turbo-preview",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4o",
- "gpt-4-turbo",
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-3.5-turbo-0125"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": true,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 0.1,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "str",
- "Text",
- "object"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-uYXZJ"
- },
- "selected": false,
- "width": 384,
- "height": 565,
- "positionAbsolute": {
- "x": 2925.784767523062,
- "y": 933.6465680967775
- },
- "dragging": false
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-EJkG3"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 385,
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-DNmvg",
+ "type": "genericNode",
+ "position": {
+ "x": 5077.71285886074,
+ "y": 1232.9152769735522
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "In case of Message being a Record, this template will be used to convert it to text.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "Question Generator",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "TextOutput-MUDOR",
- "type": "genericNode",
- "position": {
- "x": 4446.064323520379,
- "y": 633.833297518702
- },
- "data": {
- "type": "TextOutput",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Value",
- "advanced": false,
- "input_types": [
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "Text or Record to be passed as output.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a text output in the Playground.",
- "icon": "type",
- "base_classes": [
- "str",
- "Text",
- "object"
- ],
- "display_name": "Second Prompt",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "TextOutput-MUDOR"
- },
- "selected": false,
- "width": 384,
- "height": 290,
- "dragging": false,
- "positionAbsolute": {
- "x": 4446.064323520379,
- "y": 633.833297518702
- }
+ "description": "Display a chat message in the Playground.",
+ "icon": "ChatOutput",
+ "base_classes": ["object", "Record", "Text", "str"],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null,
+ "record_template": null
},
- {
- "id": "OpenAIModel-XawYB",
- "type": "genericNode",
- "position": {
- "x": 4500.152018344182,
- "y": 1027.7382026227656
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 256,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-4-turbo-preview",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4o",
- "gpt-4-turbo",
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-3.5-turbo-0125"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": true,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 0.1,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "str",
- "Text",
- "object"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-XawYB"
- },
- "selected": false,
- "width": 384,
- "height": 565,
- "positionAbsolute": {
- "x": 4500.152018344182,
- "y": 1027.7382026227656
- },
- "dragging": false
- }
- ],
- "edges": [
- {
- "source": "TextInput-sptaH",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153TextInput\u0153,\u0153id\u0153:\u0153TextInput-sptaH\u0153}",
- "target": "Prompt-amqBu",
- "targetHandle": "{\u0153fieldName\u0153:\u0153document\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "document",
- "id": "Prompt-amqBu",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "Text",
- "object"
- ],
- "dataType": "TextInput",
- "id": "TextInput-sptaH"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-TextInput-sptaH{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153TextInput\u0153,\u0153id\u0153:\u0153TextInput-sptaH\u0153}-Prompt-amqBu{\u0153fieldName\u0153:\u0153document\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-DNmvg"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 385
+ },
+ {
+ "id": "TextInput-sptaH",
+ "type": "genericNode",
+ "position": {
+ "x": 1700.5624822024752,
+ "y": 1039.603088937466
+ },
+ "data": {
+ "type": "TextInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextInput(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as input.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Optional[Text] = \"\",\n record_template: Optional[str] = \"\",\n ) -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "Revolutionary Nano-Battery Technology Unveiled In a groundbreaking announcement yesterday, researchers from the fictional Tech Innovations Institute revealed the development of a new nano-battery technology that promises to revolutionize energy storage. The new battery, dubbed the \"EnerGCell\", uses advanced nanomaterials to achieve unprecedented efficiency and storage capacities. According to lead researcher Dr. Ada Byron, the EnerGCell can store up to ten times more energy than the best lithium-ion batteries available today, while charging in just a fraction of the time. \"We're talking about charging your electric vehicle in just five minutes for a range of over 1,000 miles,\" Dr. Byron stated during the press conference. The technology behind the EnerGCell involves a complex arrangement of nanostructured electrodes that allow for rapid ion transfer and extremely high energy density. This breakthrough was achieved after a decade of research into nanomaterials and their applications in energy storage. The implications of this technology are vast, promising to accelerate the adoption of renewable energy by making it more practical and affordable to store wind and solar power. It could also lead to significant advancements in electric vehicles, mobile devices, and any other technology that relies on batteries. Despite the excitement, some experts are calling for patience, noting that the EnerGCell is still in its early stages of development and may take several years before it's commercially available. However, the potential impact of such a technology on the environment and the global economy is undeniable. Tech Innovations Institute plans to continue refining the EnerGCell and begin pilot projects with select partners in the coming year. If successful, this nano-battery technology could indeed be the breakthrough needed to usher in a new era of clean energy and technology.",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": ["Record", "Text"],
+ "dynamic": false,
+ "info": "Text or Record to be passed as input.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "Prompt-amqBu",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153}",
- "target": "TextOutput-2MS4a",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-2MS4a\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "TextOutput-2MS4a",
- "inputTypes": [
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "str",
- "Text"
- ],
- "dataType": "Prompt",
- "id": "Prompt-amqBu"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-Prompt-amqBu{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153}-TextOutput-2MS4a{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-2MS4a\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "description": "Get text inputs from the Playground.",
+ "icon": "type",
+ "base_classes": ["str", "Text", "object"],
+ "display_name": "Text Input",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
},
- {
- "source": "Prompt-amqBu",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153}",
- "target": "OpenAIModel-uYXZJ",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-uYXZJ",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "str",
- "Text"
- ],
- "dataType": "Prompt",
- "id": "Prompt-amqBu"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-Prompt-amqBu{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153}-OpenAIModel-uYXZJ{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextInput-sptaH"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 290,
+ "positionAbsolute": {
+ "x": 1700.5624822024752,
+ "y": 1039.603088937466
+ },
+ "dragging": false
+ },
+ {
+ "id": "TextOutput-2MS4a",
+ "type": "genericNode",
+ "position": {
+ "x": 2917.216113690115,
+ "y": 513.0058511435552
+ },
+ "data": {
+ "type": "TextOutput",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": ["Record", "Text"],
+ "dynamic": false,
+ "info": "Text or Record to be passed as output.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIModel-uYXZJ",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153}",
- "target": "Prompt-gTNiz",
- "targetHandle": "{\u0153fieldName\u0153:\u0153summary\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "summary",
- "id": "Prompt-gTNiz",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "Text",
- "object"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-uYXZJ"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIModel-uYXZJ{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153}-Prompt-gTNiz{\u0153fieldName\u0153:\u0153summary\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "description": "Display a text output in the Playground.",
+ "icon": "type",
+ "base_classes": ["str", "Text", "object"],
+ "display_name": "First Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
},
- {
- "source": "OpenAIModel-uYXZJ",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153}",
- "target": "ChatOutput-EJkG3",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-EJkG3\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-EJkG3",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "Text",
- "object"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-uYXZJ"
- }
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextOutput-2MS4a"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 290,
+ "positionAbsolute": {
+ "x": 2917.216113690115,
+ "y": 513.0058511435552
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIModel-uYXZJ",
+ "type": "genericNode",
+ "position": {
+ "x": 2925.784767523062,
+ "y": 933.6465680967775
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-4-turbo-preview",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 0.1,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
},
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIModel-uYXZJ{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153}-ChatOutput-EJkG3{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-EJkG3\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "Prompt-gTNiz",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153}",
- "target": "TextOutput-MUDOR",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-MUDOR\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "TextOutput-MUDOR",
- "inputTypes": [
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "str",
- "Text"
- ],
- "dataType": "Prompt",
- "id": "Prompt-gTNiz"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-Prompt-gTNiz{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153}-TextOutput-MUDOR{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-MUDOR\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": ["str", "Text", "object"],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
},
- {
- "source": "Prompt-gTNiz",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153}",
- "target": "OpenAIModel-XawYB",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-XawYB\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-XawYB",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "str",
- "Text"
- ],
- "dataType": "Prompt",
- "id": "Prompt-gTNiz"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-Prompt-gTNiz{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153}-OpenAIModel-XawYB{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-XawYB\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-uYXZJ"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 565,
+ "positionAbsolute": {
+ "x": 2925.784767523062,
+ "y": 933.6465680967775
+ },
+ "dragging": false
+ },
+ {
+ "id": "TextOutput-MUDOR",
+ "type": "genericNode",
+ "position": {
+ "x": 4446.064323520379,
+ "y": 633.833297518702
+ },
+ "data": {
+ "type": "TextOutput",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": ["Record", "Text"],
+ "dynamic": false,
+ "info": "Text or Record to be passed as output.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIModel-XawYB",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-XawYB\u0153}",
- "target": "ChatOutput-DNmvg",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-DNmvg\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-DNmvg",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "str",
- "Text",
- "object"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-XawYB"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIModel-XawYB{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-XawYB\u0153}-ChatOutput-DNmvg{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-DNmvg\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- }
- ],
- "viewport": {
- "x": -383.7251879618552,
- "y": 69.19813933800037,
- "zoom": 0.3105753483695743
+ "description": "Display a text output in the Playground.",
+ "icon": "type",
+ "base_classes": ["str", "Text", "object"],
+ "display_name": "Second Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextOutput-MUDOR"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 290,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 4446.064323520379,
+ "y": 633.833297518702
}
- },
- "description": "The Prompt Chaining flow chains prompts with LLMs, refining outputs through iterative stages.",
- "name": "Prompt Chaining",
- "last_tested_version": "1.0.0a0",
- "is_component": false
+ },
+ {
+ "id": "OpenAIModel-XawYB",
+ "type": "genericNode",
+ "position": {
+ "x": 4500.152018344182,
+ "y": 1027.7382026227656
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-4-turbo-preview",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": ""
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 0.1,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": ["str", "Text", "object"],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
+ },
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-XawYB"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 565,
+ "positionAbsolute": {
+ "x": 4500.152018344182,
+ "y": 1027.7382026227656
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "TextInput-sptaH",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153TextInput\u0153,\u0153id\u0153:\u0153TextInput-sptaH\u0153}",
+ "target": "Prompt-amqBu",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153document\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "document",
+ "id": "Prompt-amqBu",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "Text", "object"],
+ "dataType": "TextInput",
+ "id": "TextInput-sptaH"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-TextInput-sptaH{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153TextInput\u0153,\u0153id\u0153:\u0153TextInput-sptaH\u0153}-Prompt-amqBu{\u0153fieldName\u0153:\u0153document\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "Prompt-amqBu",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153}",
+ "target": "TextOutput-2MS4a",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-2MS4a\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "TextOutput-2MS4a",
+ "inputTypes": ["Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "str", "Text"],
+ "dataType": "Prompt",
+ "id": "Prompt-amqBu"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-Prompt-amqBu{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153}-TextOutput-2MS4a{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-2MS4a\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "Prompt-amqBu",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153}",
+ "target": "OpenAIModel-uYXZJ",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-uYXZJ",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "str", "Text"],
+ "dataType": "Prompt",
+ "id": "Prompt-amqBu"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-Prompt-amqBu{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-amqBu\u0153}-OpenAIModel-uYXZJ{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "OpenAIModel-uYXZJ",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153}",
+ "target": "Prompt-gTNiz",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153summary\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "summary",
+ "id": "Prompt-gTNiz",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "Text", "object"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-uYXZJ"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIModel-uYXZJ{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153}-Prompt-gTNiz{\u0153fieldName\u0153:\u0153summary\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "OpenAIModel-uYXZJ",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153}",
+ "target": "ChatOutput-EJkG3",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-EJkG3\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-EJkG3",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "Text", "object"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-uYXZJ"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIModel-uYXZJ{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-uYXZJ\u0153}-ChatOutput-EJkG3{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-EJkG3\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "Prompt-gTNiz",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153}",
+ "target": "TextOutput-MUDOR",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-MUDOR\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "TextOutput-MUDOR",
+ "inputTypes": ["Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "str", "Text"],
+ "dataType": "Prompt",
+ "id": "Prompt-gTNiz"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-Prompt-gTNiz{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153}-TextOutput-MUDOR{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-MUDOR\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "Prompt-gTNiz",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153}",
+ "target": "OpenAIModel-XawYB",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-XawYB\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-XawYB",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "str", "Text"],
+ "dataType": "Prompt",
+ "id": "Prompt-gTNiz"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-Prompt-gTNiz{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153str\u0153,\u0153Text\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-gTNiz\u0153}-OpenAIModel-XawYB{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-XawYB\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "OpenAIModel-XawYB",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-XawYB\u0153}",
+ "target": "ChatOutput-DNmvg",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-DNmvg\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-DNmvg",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["str", "Text", "object"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-XawYB"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIModel-XawYB{\u0153baseClasses\u0153:[\u0153str\u0153,\u0153Text\u0153,\u0153object\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-XawYB\u0153}-ChatOutput-DNmvg{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-DNmvg\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ }
+ ],
+ "viewport": {
+ "x": -383.7251879618552,
+ "y": 69.19813933800037,
+ "zoom": 0.3105753483695743
+ }
+ },
+ "description": "The Prompt Chaining flow chains prompts with LLMs, refining outputs through iterative stages.",
+ "name": "Prompt Chaining",
+ "last_tested_version": "1.0.0a0",
+ "is_component": false
}
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json
index 979f35403..f95db5a8d 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json
@@ -1,3407 +1,3151 @@
{
- "id": "51e2b78a-199b-4054-9f32-e288eef6924c",
- "data": {
- "nodes": [
- {
- "id": "ChatInput-yxMKE",
- "type": "genericNode",
- "position": {
- "x": 1195.5276981160775,
- "y": 209.421875
- },
- "data": {
- "type": "ChatInput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n files: Optional[list[str]] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n files=files,\n session_id=session_id,\n return_record=return_record,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "value": "what is a line"
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "User",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Get chat inputs from the Playground.",
- "icon": "ChatInput",
- "base_classes": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "display_name": "Chat Input",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatInput-yxMKE"
- },
- "selected": false,
- "width": 384,
- "height": 383
+ "id": "51e2b78a-199b-4054-9f32-e288eef6924c",
+ "data": {
+ "nodes": [
+ {
+ "id": "ChatInput-yxMKE",
+ "type": "genericNode",
+ "position": {
+ "x": 1195.5276981160775,
+ "y": 209.421875
+ },
+ "data": {
+ "type": "ChatInput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": [],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "value": "what is a line"
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "User",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "TextOutput-BDknO",
- "type": "genericNode",
- "position": {
- "x": 2322.600672827879,
- "y": 604.9467307442569
- },
- "data": {
- "type": "TextOutput",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Value",
- "advanced": false,
- "input_types": [
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "Text or Record to be passed as output.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a text output in the Playground.",
- "icon": "type",
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "display_name": "Extracted Chunks",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "record_template": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "TextOutput-BDknO"
- },
- "selected": false,
- "width": 384,
- "height": 289,
- "positionAbsolute": {
- "x": 2322.600672827879,
- "y": 604.9467307442569
- },
- "dragging": false
+ "description": "Get chat inputs from the Playground.",
+ "icon": "ChatInput",
+ "base_classes": ["Text", "str", "object", "Record"],
+ "display_name": "Chat Input",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null
},
- {
- "id": "OpenAIEmbeddings-ZlOk1",
- "type": "genericNode",
- "position": {
- "x": 1183.667250865064,
- "y": 687.3171828430261
- },
- "data": {
- "type": "OpenAIEmbeddings",
- "node": {
- "template": {
- "allowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "allowed_special",
- "display_name": "Allowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "client": {
- "type": "Any",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "client",
- "display_name": "Client",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n 
\"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n 
disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_headers": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_headers",
- "display_name": "Default Headers",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_query": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_query",
- "display_name": "Default Query",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "deployment": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "deployment",
- "display_name": "Deployment",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "disallowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [
- "all"
- ],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "disallowed_special",
- "display_name": "Disallowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "embedding_ctx_length": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 8191,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding_ctx_length",
- "display_name": "Embedding Context Length",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_retries": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 6,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_retries",
- "display_name": "Max Retries",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "text-embedding-3-small",
- "text-embedding-3-large",
- "text-embedding-ada-002"
- ],
- "name": "model",
- "display_name": "Model",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "openai_api_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_type",
- "display_name": "OpenAI API Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_version": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_version",
- "display_name": "OpenAI API Version",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_organization": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_organization",
- "display_name": "OpenAI Organization",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_proxy": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_proxy",
- "display_name": "OpenAI Proxy",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "request_timeout": {
- "type": "float",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "request_timeout",
- "display_name": "Request Timeout",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "show_progress_bar": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "show_progress_bar",
- "display_name": "Show Progress Bar",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "skip_empty": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "skip_empty",
- "display_name": "Skip Empty",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_enable": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_enable",
- "display_name": "TikToken Enable",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_model_name",
- "display_name": "TikToken Model Name",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Generate embeddings using OpenAI models.",
- "base_classes": [
- "Embeddings"
- ],
- "display_name": "OpenAI Embeddings",
- "documentation": "",
- "custom_fields": {
- "openai_api_key": null,
- "default_headers": null,
- "default_query": null,
- "allowed_special": null,
- "disallowed_special": null,
- "chunk_size": null,
- "client": null,
- "deployment": null,
- "embedding_ctx_length": null,
- "max_retries": null,
- "model": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "openai_api_type": null,
- "openai_api_version": null,
- "openai_organization": null,
- "openai_proxy": null,
- "request_timeout": null,
- "show_progress_bar": null,
- "skip_empty": null,
- "tiktoken_enable": null,
- "tiktoken_model_name": null
- },
- "output_types": [
- "Embeddings"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "OpenAIEmbeddings-ZlOk1"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "dragging": false
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatInput-yxMKE"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383
+ },
+ {
+ "id": "TextOutput-BDknO",
+ "type": "genericNode",
+ "position": {
+ "x": 2322.600672827879,
+ "y": 604.9467307442569
+ },
+ "data": {
+ "type": "TextOutput",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Value",
+ "advanced": false,
+ "input_types": ["Record", "Text"],
+ "dynamic": false,
+ "info": "Text or Record to be passed as output.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "OpenAIModel-EjXlN",
- "type": "genericNode",
- "position": {
- "x": 3410.117202077183,
- "y": 431.2038048137648
- },
- "data": {
- "type": "OpenAIModel",
- "node": {
- "template": {
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_tokens": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 256,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_tokens",
- "display_name": "Max Tokens",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "gpt-3.5-turbo",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "gpt-4o",
- "gpt-4-turbo",
- "gpt-4-turbo-preview",
- "gpt-3.5-turbo",
- "gpt-3.5-turbo-0125"
- ],
- "name": "model_name",
- "display_name": "Model Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "The OpenAI API Key to use for the OpenAI model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "stream": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "stream",
- "display_name": "Stream",
- "advanced": true,
- "dynamic": false,
- "info": "Stream the response from the model. Streaming works only in Chat.",
- "load_from_db": false,
- "title_case": false
- },
- "system_message": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "system_message",
- "display_name": "System Message",
- "advanced": true,
- "dynamic": false,
- "info": "System message to pass to the model.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "temperature": {
- "type": "float",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 0.1,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "temperature",
- "display_name": "Temperature",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "Generates text using OpenAI LLMs.",
- "icon": "OpenAI",
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "display_name": "OpenAI",
- "documentation": "",
- "custom_fields": {
- "input_value": null,
- "openai_api_key": null,
- "temperature": null,
- "model_name": null,
- "max_tokens": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "stream": null,
- "system_message": null
- },
- "output_types": [
- "Text"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "max_tokens",
- "model_kwargs",
- "model_name",
- "openai_api_base",
- "openai_api_key",
- "temperature",
- "input_value",
- "system_message",
- "stream"
- ],
- "beta": false
- },
- "id": "OpenAIModel-EjXlN"
- },
- "selected": true,
- "width": 384,
- "height": 563,
- "positionAbsolute": {
- "x": 3410.117202077183,
- "y": 431.2038048137648
- },
- "dragging": false
+ "description": "Display a text output in the Playground.",
+ "icon": "type",
+ "base_classes": ["object", "Text", "str"],
+ "display_name": "Extracted Chunks",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "record_template": null
},
- {
- "id": "Prompt-xeI6K",
- "type": "genericNode",
- "position": {
- "x": 2969.0261961391298,
- "y": 442.1613649809069
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "TextOutput-BDknO"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 289,
+ "positionAbsolute": {
+ "x": 2322.600672827879,
+ "y": 604.9467307442569
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIEmbeddings-ZlOk1",
+ "type": "genericNode",
+ "position": {
+ "x": 1183.667250865064,
+ "y": 687.3171828430261
+ },
+ "data": {
+ "type": "OpenAIEmbeddings",
+ "node": {
+ "template": {
+ "allowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "allowed_special",
+ "display_name": "Allowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "client": {
+ "type": "Any",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "client",
+ "display_name": "Client",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, NestedDict\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n 
deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_headers": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_headers",
+ "display_name": "Default Headers",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_query": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_query",
+ "display_name": "Default Query",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "deployment": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "deployment",
+ "display_name": "Deployment",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "disallowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": ["all"],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "disallowed_special",
+ "display_name": "Disallowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "embedding_ctx_length": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 8191,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding_ctx_length",
+ "display_name": "Embedding Context Length",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_retries": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 6,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_retries",
+ "display_name": "Max Retries",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "text-embedding-3-small",
+ "text-embedding-3-large",
+ "text-embedding-ada-002"
+ ],
+ "name": "model",
+ "display_name": "Model",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "openai_api_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_type",
+ "display_name": "OpenAI API Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_version": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_version",
+ "display_name": "OpenAI API Version",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_organization": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_organization",
+ "display_name": "OpenAI Organization",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_proxy": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_proxy",
+ "display_name": "OpenAI Proxy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "request_timeout": {
+ "type": "float",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "request_timeout",
+ "display_name": "Request Timeout",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
},
- "data": {
- "type": "Prompt",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "template": {
- "type": "prompt",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "template",
- "display_name": "Template",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent",
- "context": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "context",
- "display_name": "context",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- },
- "question": {
- "field_type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "question",
- "display_name": "question",
- "advanced": false,
- "input_types": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "type": "str"
- }
- },
- "description": "Create a prompt template with dynamic variables.",
- "icon": "prompts",
- "is_input": null,
- "is_output": null,
- "is_composition": null,
- "base_classes": [
- "object",
- "Text",
- "str"
- ],
- "name": "",
- "display_name": "Prompt",
- "documentation": "",
- "custom_fields": {
- "template": [
- "context",
- "question"
- ]
- },
- "output_types": [
- "Text"
- ],
- "full_path": null,
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false,
- "error": null
- },
- "id": "Prompt-xeI6K",
- "description": "Create a prompt template with dynamic variables.",
- "display_name": "Prompt"
- },
- "selected": false,
- "width": 384,
- "height": 477,
- "positionAbsolute": {
- "x": 2969.0261961391298,
- "y": 442.1613649809069
- },
- "dragging": false
+ "load_from_db": false,
+ "title_case": false
+ },
+ "show_progress_bar": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "show_progress_bar",
+ "display_name": "Show Progress Bar",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "skip_empty": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "skip_empty",
+ "display_name": "Skip Empty",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_enable": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_enable",
+ "display_name": "TikToken Enable",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_model_name",
+ "display_name": "TikToken Model Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "ChatOutput-Q39I8",
- "type": "genericNode",
- "position": {
- "x": 3887.2073667611485,
- "y": 588.4801225794856
- },
- "data": {
- "type": "ChatOutput",
- "node": {
- "template": {
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n files: Optional[list[str]] = None,\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n files=files,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Message",
- "advanced": false,
- "input_types": [
- "Text"
- ],
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "record_template": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "{text}",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "record_template",
- "display_name": "Record Template",
- "advanced": true,
- "dynamic": false,
- "info": "In case of Message being a Record, this template will be used to convert it to text.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "return_record": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "return_record",
- "display_name": "Return Record",
- "advanced": true,
- "dynamic": false,
- "info": "Return the message as a record containing the sender, sender_name, and session_id.",
- "load_from_db": false,
- "title_case": false
- },
- "sender": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Machine",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Machine",
- "User"
- ],
- "name": "sender",
- "display_name": "Sender Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "sender_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "AI",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "sender_name",
- "display_name": "Sender Name",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "session_id": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "session_id",
- "display_name": "Session ID",
- "advanced": true,
- "dynamic": false,
- "info": "If provided, the message will be stored in the memory.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Display a chat message in the Playground.",
- "icon": "ChatOutput",
- "base_classes": [
- "object",
- "Text",
- "Record",
- "str"
- ],
- "display_name": "Chat Output",
- "documentation": "",
- "custom_fields": {
- "sender": null,
- "sender_name": null,
- "input_value": null,
- "session_id": null,
- "return_record": null,
- "record_template": null
- },
- "output_types": [
- "Text",
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "ChatOutput-Q39I8"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "positionAbsolute": {
- "x": 3887.2073667611485,
- "y": 588.4801225794856
- },
- "dragging": false
+ "description": "Generate embeddings using OpenAI models.",
+ "base_classes": ["Embeddings"],
+ "display_name": "OpenAI Embeddings",
+ "documentation": "",
+ "custom_fields": {
+ "openai_api_key": null,
+ "default_headers": null,
+ "default_query": null,
+ "allowed_special": null,
+ "disallowed_special": null,
+ "chunk_size": null,
+ "client": null,
+ "deployment": null,
+ "embedding_ctx_length": null,
+ "max_retries": null,
+ "model": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "openai_api_type": null,
+ "openai_api_version": null,
+ "openai_organization": null,
+ "openai_proxy": null,
+ "request_timeout": null,
+ "show_progress_bar": null,
+ "skip_empty": null,
+ "tiktoken_enable": null,
+ "tiktoken_model_name": null
},
- {
- "id": "File-t0a6a",
- "type": "genericNode",
- "position": {
- "x": 2257.233450682836,
- "y": 1747.5389618367233
+ "output_types": ["Embeddings"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "OpenAIEmbeddings-ZlOk1"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "dragging": false
+ },
+ {
+ "id": "OpenAIModel-EjXlN",
+ "type": "genericNode",
+ "position": {
+ "x": 3410.117202077183,
+ "y": 431.2038048137648
+ },
+ "data": {
+ "type": "OpenAIModel",
+ "node": {
+ "template": {
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_tokens": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 256,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_tokens",
+ "display_name": "Max Tokens",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "gpt-3.5-turbo",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "name": "model_name",
+ "display_name": "Model Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "stream": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "stream",
+ "display_name": "Stream",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "system_message": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "system_message",
+ "display_name": "System Message",
+ "advanced": true,
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "temperature": {
+ "type": "float",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 0.1,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "temperature",
+ "display_name": "Temperature",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
},
- "data": {
- "type": "File",
- "node": {
- "template": {
- "path": {
- "type": "file",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [
- ".txt",
- ".md",
- ".mdx",
- ".csv",
- ".json",
- ".yaml",
- ".yml",
- ".xml",
- ".html",
- ".htm",
- ".pdf",
- ".docx",
- ".py",
- ".sh",
- ".sql",
- ".js",
- ".ts",
- ".tsx"
- ],
- "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf",
- "password": false,
- "name": "path",
- "display_name": "Path",
- "advanced": false,
- "dynamic": false,
- "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx",
- "load_from_db": false,
- "title_case": false,
- "value": ""
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "silent_errors": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "silent_errors",
- "display_name": "Silent Errors",
- "advanced": true,
- "dynamic": false,
- "info": "If true, errors will not raise an exception.",
- "load_from_db": false,
- "title_case": false
- },
- "_type": "CustomComponent"
- },
- "description": "A generic file loader.",
- "icon": "file-text",
- "base_classes": [
- "Record"
- ],
- "display_name": "File",
- "documentation": "",
- "custom_fields": {
- "path": null,
- "silent_errors": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "File-t0a6a"
- },
- "selected": false,
- "width": 384,
- "height": 281,
- "positionAbsolute": {
- "x": 2257.233450682836,
- "y": 1747.5389618367233
- },
- "dragging": false
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
},
- {
- "id": "RecursiveCharacterTextSplitter-tR9QM",
- "type": "genericNode",
- "position": {
- "x": 2791.013514133929,
- "y": 1462.9588953494142
- },
- "data": {
- "type": "RecursiveCharacterTextSplitter",
- "node": {
- "template": {
- "inputs": {
- "type": "Document",
- "required": true,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "inputs",
- "display_name": "Input",
- "advanced": false,
- "input_types": [
- "Document",
- "Record"
- ],
- "dynamic": false,
- "info": "The texts to split.",
- "load_from_db": false,
- "title_case": false
- },
- "chunk_overlap": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 200,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_overlap",
- "display_name": "Chunk Overlap",
- "advanced": false,
- "dynamic": false,
- "info": "The amount of overlap between chunks.",
- "load_from_db": false,
- "title_case": false
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": false,
- "dynamic": false,
- "info": "The maximum length of each chunk.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = 
None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "separators": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "separators",
- "display_name": "Separators",
- "advanced": false,
- "dynamic": false,
- "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": [
- ""
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Split text into chunks of a specified length.",
- "base_classes": [
- "Record"
- ],
- "display_name": "Recursive Character Text Splitter",
- "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter",
- "custom_fields": {
- "inputs": null,
- "separators": null,
- "chunk_size": null,
- "chunk_overlap": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "RecursiveCharacterTextSplitter-tR9QM"
- },
- "selected": false,
- "width": 384,
- "height": 501,
- "positionAbsolute": {
- "x": 2791.013514133929,
- "y": 1462.9588953494142
- },
- "dragging": false
+ "description": "Generates text using OpenAI LLMs.",
+ "icon": "OpenAI",
+ "base_classes": ["object", "Text", "str"],
+ "display_name": "OpenAI",
+ "documentation": "",
+ "custom_fields": {
+ "input_value": null,
+ "openai_api_key": null,
+ "temperature": null,
+ "model_name": null,
+ "max_tokens": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "stream": null,
+ "system_message": null
},
- {
- "id": "AstraDBSearch-41nRz",
- "type": "genericNode",
- "position": {
- "x": 1723.976434815103,
- "y": 277.03317407245913
- },
- "data": {
- "type": "AstraDBSearch",
- "node": {
- "template": {
- "embedding": {
- "type": "Embeddings",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding",
- "display_name": "Embedding",
- "advanced": false,
- "dynamic": false,
- "info": "Embedding to use",
- "load_from_db": false,
- "title_case": false
- },
- "input_value": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "input_value",
- "display_name": "Input Value",
- "advanced": false,
- "dynamic": false,
- "info": "Input value to search",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "api_endpoint": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "api_endpoint",
- "display_name": "API Endpoint",
- "advanced": false,
- "dynamic": false,
- "info": "API endpoint URL for the Astra DB service.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "batch_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "batch_size",
- "display_name": "Batch Size",
- "advanced": true,
- "dynamic": false,
- "info": "Optional number of records to process in a single batch.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_delete_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_delete_concurrency",
- "display_name": "Bulk Delete Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk delete operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_batch_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_batch_concurrency",
- "display_name": "Bulk Insert Batch Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_overwrite_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_overwrite_concurrency",
- "display_name": "Bulk Insert Overwrite Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": 
True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n 
bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "collection_indexing_policy": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_indexing_policy",
- "display_name": "Collection Indexing Policy",
- "advanced": true,
- "dynamic": false,
- "info": "Optional dictionary defining the indexing policy for the collection.",
- "load_from_db": false,
- "title_case": false
- },
- "collection_name": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_name",
- "display_name": "Collection Name",
- "advanced": false,
- "dynamic": false,
- "info": "The name of the collection within Astra DB where the vectors will be stored.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": "langflow"
- },
- "metadata_indexing_exclude": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_exclude",
- "display_name": "Metadata Indexing Exclude",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to exclude from the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metadata_indexing_include": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_include",
- "display_name": "Metadata Indexing Include",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to include in the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metric": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metric",
- "display_name": "Metric",
- "advanced": true,
- "dynamic": false,
- "info": "Optional distance metric for vector comparisons in the vector store.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "namespace": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "namespace",
- "display_name": "Namespace",
- "advanced": true,
- "dynamic": false,
- "info": "Optional namespace within Astra DB to use for the collection.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "number_of_results": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 4,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "number_of_results",
- "display_name": "Number of Results",
- "advanced": true,
- "dynamic": false,
- "info": "Number of results to return.",
- "load_from_db": false,
- "title_case": false
- },
- "pre_delete_collection": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "pre_delete_collection",
- "display_name": "Pre Delete Collection",
- "advanced": true,
- "dynamic": false,
- "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
- "load_from_db": false,
- "title_case": false
- },
- "search_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Similarity",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Similarity",
- "MMR"
- ],
- "name": "search_type",
- "display_name": "Search Type",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "setup_mode": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Sync",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Sync",
- "Async",
- "Off"
- ],
- "name": "setup_mode",
- "display_name": "Setup Mode",
- "advanced": true,
- "dynamic": false,
- "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "token": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "token",
- "display_name": "Token",
- "advanced": false,
- "dynamic": false,
- "info": "Authentication token for accessing Astra DB.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "_type": "CustomComponent"
- },
- "description": "Searches an existing Astra DB Vector Store.",
- "icon": "AstraDB",
- "base_classes": [
- "Record"
- ],
- "display_name": "Astra DB Search",
- "documentation": "",
- "custom_fields": {
- "embedding": null,
- "collection_name": null,
- "input_value": null,
- "token": null,
- "api_endpoint": null,
- "search_type": null,
- "number_of_results": null,
- "namespace": null,
- "metric": null,
- "batch_size": null,
- "bulk_insert_batch_concurrency": null,
- "bulk_insert_overwrite_concurrency": null,
- "bulk_delete_concurrency": null,
- "setup_mode": null,
- "pre_delete_collection": null,
- "metadata_indexing_include": null,
- "metadata_indexing_exclude": null,
- "collection_indexing_policy": null
- },
- "output_types": [
- "Record"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "token",
- "api_endpoint",
- "collection_name",
- "input_value",
- "embedding"
- ],
- "beta": false
- },
- "id": "AstraDBSearch-41nRz"
- },
- "selected": false,
- "width": 384,
- "height": 713,
- "dragging": false,
- "positionAbsolute": {
- "x": 1723.976434815103,
- "y": 277.03317407245913
- }
+ "output_types": ["Text"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "max_tokens",
+ "model_kwargs",
+ "model_name",
+ "openai_api_base",
+ "openai_api_key",
+ "temperature",
+ "input_value",
+ "system_message",
+ "stream"
+ ],
+ "beta": false
+ },
+ "id": "OpenAIModel-EjXlN"
+ },
+ "selected": true,
+ "width": 384,
+ "height": 563,
+ "positionAbsolute": {
+ "x": 3410.117202077183,
+ "y": 431.2038048137648
+ },
+ "dragging": false
+ },
+ {
+ "id": "Prompt-xeI6K",
+ "type": "genericNode",
+ "position": {
+ "x": 2969.0261961391298,
+ "y": 442.1613649809069
+ },
+ "data": {
+ "type": "Prompt",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "template": {
+ "type": "prompt",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "template",
+ "display_name": "Template",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent",
+ "context": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "context",
+ "display_name": "context",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ },
+ "question": {
+ "field_type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "question",
+ "display_name": "question",
+ "advanced": false,
+ "input_types": [
+ "Document",
+ "BaseOutputParser",
+ "Record",
+ "Text"
+ ],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "type": "str"
+ }
},
- {
- "id": "AstraDB-eUCSS",
- "type": "genericNode",
- "position": {
- "x": 3372.04958055989,
- "y": 1611.0742035495277
- },
- "data": {
- "type": "AstraDB",
- "node": {
- "template": {
- "embedding": {
- "type": "Embeddings",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding",
- "display_name": "Embedding",
- "advanced": false,
- "dynamic": false,
- "info": "Embedding to use",
- "load_from_db": false,
- "title_case": false
- },
- "inputs": {
- "type": "Record",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "inputs",
- "display_name": "Inputs",
- "advanced": false,
- "dynamic": false,
- "info": "Optional list of records to be processed and stored in the vector store.",
- "load_from_db": false,
- "title_case": false
- },
- "api_endpoint": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "api_endpoint",
- "display_name": "API Endpoint",
- "advanced": false,
- "dynamic": false,
- "info": "API endpoint URL for the Astra DB service.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "batch_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "batch_size",
- "display_name": "Batch Size",
- "advanced": true,
- "dynamic": false,
- "info": "Optional number of records to process in a single batch.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_delete_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_delete_concurrency",
- "display_name": "Bulk Delete Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk delete operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_batch_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_batch_concurrency",
- "display_name": "Bulk Insert Batch Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations.",
- "load_from_db": false,
- "title_case": false
- },
- "bulk_insert_overwrite_concurrency": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "bulk_insert_overwrite_concurrency",
- "display_name": "Bulk Insert Overwrite Concurrency",
- "advanced": true,
- "dynamic": false,
- "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import List, Optional, Union\n\nfrom langchain.schema import BaseRetriever\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n 
\"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: 
Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> Union[VectorStore, BaseRetriever]:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "collection_indexing_policy": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_indexing_policy",
- "display_name": "Collection Indexing Policy",
- "advanced": true,
- "dynamic": false,
- "info": "Optional dictionary defining the indexing policy for the collection.",
- "load_from_db": false,
- "title_case": false
- },
- "collection_name": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "collection_name",
- "display_name": "Collection Name",
- "advanced": false,
- "dynamic": false,
- "info": "The name of the collection within Astra DB where the vectors will be stored.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": "langflow"
- },
- "metadata_indexing_exclude": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_exclude",
- "display_name": "Metadata Indexing Exclude",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to exclude from the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metadata_indexing_include": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metadata_indexing_include",
- "display_name": "Metadata Indexing Include",
- "advanced": true,
- "dynamic": false,
- "info": "Optional list of metadata fields to include in the indexing.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "metric": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "metric",
- "display_name": "Metric",
- "advanced": true,
- "dynamic": false,
- "info": "Optional distance metric for vector comparisons in the vector store.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "namespace": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "namespace",
- "display_name": "Namespace",
- "advanced": true,
- "dynamic": false,
- "info": "Optional namespace within Astra DB to use for the collection.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "pre_delete_collection": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "pre_delete_collection",
- "display_name": "Pre Delete Collection",
- "advanced": true,
- "dynamic": false,
- "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
- "load_from_db": false,
- "title_case": false
- },
- "setup_mode": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "Sync",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "Sync",
- "Async",
- "Off"
- ],
- "name": "setup_mode",
- "display_name": "Setup Mode",
- "advanced": true,
- "dynamic": false,
- "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "token": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "token",
- "display_name": "Token",
- "advanced": false,
- "dynamic": false,
- "info": "Authentication token for accessing Astra DB.",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "_type": "CustomComponent"
- },
- "description": "Builds or loads an Astra DB Vector Store.",
- "icon": "AstraDB",
- "base_classes": [
- "VectorStore"
- ],
- "display_name": "Astra DB",
- "documentation": "",
- "custom_fields": {
- "embedding": null,
- "token": null,
- "api_endpoint": null,
- "collection_name": null,
- "inputs": null,
- "namespace": null,
- "metric": null,
- "batch_size": null,
- "bulk_insert_batch_concurrency": null,
- "bulk_insert_overwrite_concurrency": null,
- "bulk_delete_concurrency": null,
- "setup_mode": null,
- "pre_delete_collection": null,
- "metadata_indexing_include": null,
- "metadata_indexing_exclude": null,
- "collection_indexing_policy": null
- },
- "output_types": [
- "VectorStore"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [
- "token",
- "api_endpoint",
- "collection_name",
- "inputs",
- "embedding"
- ],
- "beta": false
- },
- "id": "AstraDB-eUCSS"
- },
- "selected": false,
- "width": 384,
- "height": 573,
- "positionAbsolute": {
- "x": 3372.04958055989,
- "y": 1611.0742035495277
- },
- "dragging": false
+ "description": "Create a prompt template with dynamic variables.",
+ "icon": "prompts",
+ "is_input": null,
+ "is_output": null,
+ "is_composition": null,
+ "base_classes": ["object", "Text", "str"],
+ "name": "",
+ "display_name": "Prompt",
+ "documentation": "",
+ "custom_fields": {
+ "template": ["context", "question"]
},
- {
- "id": "OpenAIEmbeddings-9TPjc",
- "type": "genericNode",
- "position": {
- "x": 2814.0402191223047,
- "y": 1955.9268168273086
- },
- "data": {
- "type": "OpenAIEmbeddings",
- "node": {
- "template": {
- "allowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "allowed_special",
- "display_name": "Allowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "chunk_size": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 1000,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "chunk_size",
- "display_name": "Chunk Size",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "client": {
- "type": "Any",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "client",
- "display_name": "Client",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "code": {
- "type": "code",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": true,
- "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n 
\"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n 
disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "code",
- "advanced": true,
- "dynamic": true,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_headers": {
- "type": "dict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_headers",
- "display_name": "Default Headers",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "default_query": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "default_query",
- "display_name": "Default Query",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "deployment": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "deployment",
- "display_name": "Deployment",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "disallowed_special": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": [
- "all"
- ],
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "disallowed_special",
- "display_name": "Disallowed Special",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "embedding_ctx_length": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 8191,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "embedding_ctx_length",
- "display_name": "Embedding Context Length",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "max_retries": {
- "type": "int",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": 6,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "max_retries",
- "display_name": "Max Retries",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "model": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": true,
- "show": true,
- "multiline": false,
- "value": "text-embedding-ada-002",
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "options": [
- "text-embedding-3-small",
- "text-embedding-3-large",
- "text-embedding-ada-002"
- ],
- "name": "model",
- "display_name": "Model",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "model_kwargs": {
- "type": "NestedDict",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": {},
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "model_kwargs",
- "display_name": "Model Kwargs",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "openai_api_base": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_base",
- "display_name": "OpenAI API Base",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_key": {
- "type": "str",
- "required": true,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_key",
- "display_name": "OpenAI API Key",
- "advanced": false,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ],
- "value": ""
- },
- "openai_api_type": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": true,
- "name": "openai_api_type",
- "display_name": "OpenAI API Type",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_api_version": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_api_version",
- "display_name": "OpenAI API Version",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_organization": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_organization",
- "display_name": "OpenAI Organization",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "openai_proxy": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "openai_proxy",
- "display_name": "OpenAI Proxy",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "request_timeout": {
- "type": "float",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "request_timeout",
- "display_name": "Request Timeout",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "rangeSpec": {
- "step_type": "float",
- "min": -1,
- "max": 1,
- "step": 0.1
- },
- "load_from_db": false,
- "title_case": false
- },
- "show_progress_bar": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "show_progress_bar",
- "display_name": "Show Progress Bar",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "skip_empty": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "skip_empty",
- "display_name": "Skip Empty",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_enable": {
- "type": "bool",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "value": true,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_enable",
- "display_name": "TikToken Enable",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false
- },
- "tiktoken_model_name": {
- "type": "str",
- "required": false,
- "placeholder": "",
- "list": false,
- "show": true,
- "multiline": false,
- "fileTypes": [],
- "file_path": "",
- "password": false,
- "name": "tiktoken_model_name",
- "display_name": "TikToken Model Name",
- "advanced": true,
- "dynamic": false,
- "info": "",
- "load_from_db": false,
- "title_case": false,
- "input_types": [
- "Text"
- ]
- },
- "_type": "CustomComponent"
- },
- "description": "Generate embeddings using OpenAI models.",
- "base_classes": [
- "Embeddings"
- ],
- "display_name": "OpenAI Embeddings",
- "documentation": "",
- "custom_fields": {
- "openai_api_key": null,
- "default_headers": null,
- "default_query": null,
- "allowed_special": null,
- "disallowed_special": null,
- "chunk_size": null,
- "client": null,
- "deployment": null,
- "embedding_ctx_length": null,
- "max_retries": null,
- "model": null,
- "model_kwargs": null,
- "openai_api_base": null,
- "openai_api_type": null,
- "openai_api_version": null,
- "openai_organization": null,
- "openai_proxy": null,
- "request_timeout": null,
- "show_progress_bar": null,
- "skip_empty": null,
- "tiktoken_enable": null,
- "tiktoken_model_name": null
- },
- "output_types": [
- "Embeddings"
- ],
- "field_formatters": {},
- "frozen": false,
- "field_order": [],
- "beta": false
- },
- "id": "OpenAIEmbeddings-9TPjc"
- },
- "selected": false,
- "width": 384,
- "height": 383,
- "positionAbsolute": {
- "x": 2814.0402191223047,
- "y": 1955.9268168273086
- },
- "dragging": false
- }
- ],
- "edges": [
- {
- "source": "TextOutput-BDknO",
- "target": "Prompt-xeI6K",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}",
- "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "id": "reactflow__edge-TextOutput-BDknO{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "context",
- "id": "Prompt-xeI6K",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "TextOutput",
- "id": "TextOutput-BDknO"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": ["Text"],
+ "full_path": null,
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false,
+ "error": null
+ },
+ "id": "Prompt-xeI6K",
+ "description": "Create a prompt template with dynamic variables.",
+ "display_name": "Prompt"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 477,
+ "positionAbsolute": {
+ "x": 2969.0261961391298,
+ "y": 442.1613649809069
+ },
+ "dragging": false
+ },
+ {
+ "id": "ChatOutput-Q39I8",
+ "type": "genericNode",
+ "position": {
+ "x": 3887.2073667611485,
+ "y": 588.4801225794856
+ },
+ "data": {
+ "type": "ChatOutput",
+ "node": {
+ "template": {
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Message",
+ "advanced": false,
+ "input_types": ["Text"],
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "record_template": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "{text}",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "record_template",
+ "display_name": "Record Template",
+ "advanced": true,
+ "dynamic": false,
+ "info": "In case of Message being a Record, this template will be used to convert it to text.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "return_record": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "return_record",
+ "display_name": "Return Record",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Return the message as a record containing the sender, sender_name, and session_id.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "sender": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Machine",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Machine", "User"],
+ "name": "sender",
+ "display_name": "Sender Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "sender_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "AI",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "sender_name",
+ "display_name": "Sender Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "session_id": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "session_id",
+ "display_name": "Session ID",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If provided, the message will be stored in the memory.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "ChatInput-yxMKE",
- "target": "Prompt-xeI6K",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}",
- "targetHandle": "{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "question",
- "id": "Prompt-xeI6K",
- "inputTypes": [
- "Document",
- "BaseOutputParser",
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-yxMKE"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "description": "Display a chat message in the Playground.",
+ "icon": "ChatOutput",
+ "base_classes": ["object", "Text", "Record", "str"],
+ "display_name": "Chat Output",
+ "documentation": "",
+ "custom_fields": {
+ "sender": null,
+ "sender_name": null,
+ "input_value": null,
+ "session_id": null,
+ "return_record": null,
+ "record_template": null
},
- {
- "source": "Prompt-xeI6K",
- "target": "OpenAIModel-EjXlN",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "id": "reactflow__edge-Prompt-xeI6K{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}-OpenAIModel-EjXlN{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "OpenAIModel-EjXlN",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "Prompt",
- "id": "Prompt-xeI6K"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": ["Text", "Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "ChatOutput-Q39I8"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 3887.2073667611485,
+ "y": 588.4801225794856
+ },
+ "dragging": false
+ },
+ {
+ "id": "File-t0a6a",
+ "type": "genericNode",
+ "position": {
+ "x": 2257.233450682836,
+ "y": 1747.5389618367233
+ },
+ "data": {
+ "type": "File",
+ "node": {
+ "template": {
+ "path": {
+ "type": "file",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [
+ ".txt",
+ ".md",
+ ".mdx",
+ ".csv",
+ ".json",
+ ".yaml",
+ ".yml",
+ ".xml",
+ ".html",
+ ".htm",
+ ".pdf",
+ ".docx",
+ ".py",
+ ".sh",
+ ".sql",
+ ".js",
+ ".ts",
+ ".tsx"
+ ],
+ "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf",
+ "password": false,
+ "name": "path",
+ "display_name": "Path",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx",
+ "load_from_db": false,
+ "title_case": false,
+ "value": ""
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.custom import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "silent_errors": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "silent_errors",
+ "display_name": "Silent Errors",
+ "advanced": true,
+ "dynamic": false,
+ "info": "If true, errors will not raise an exception.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIModel-EjXlN",
- "target": "ChatOutput-Q39I8",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "id": "reactflow__edge-OpenAIModel-EjXlN{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}-ChatOutput-Q39I8{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "ChatOutput-Q39I8",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "object",
- "Text",
- "str"
- ],
- "dataType": "OpenAIModel",
- "id": "OpenAIModel-EjXlN"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "description": "A generic file loader.",
+ "icon": "file-text",
+ "base_classes": ["Record"],
+ "display_name": "File",
+ "documentation": "",
+ "custom_fields": {
+ "path": null,
+ "silent_errors": null
},
- {
- "source": "File-t0a6a",
- "target": "RecursiveCharacterTextSplitter-tR9QM",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}",
- "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}",
- "id": "reactflow__edge-File-t0a6a{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}-RecursiveCharacterTextSplitter-tR9QM{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "inputs",
- "id": "RecursiveCharacterTextSplitter-tR9QM",
- "inputTypes": [
- "Document",
- "Record"
- ],
- "type": "Document"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "File",
- "id": "File-t0a6a"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "selected": false
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "File-t0a6a"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 281,
+ "positionAbsolute": {
+ "x": 2257.233450682836,
+ "y": 1747.5389618367233
+ },
+ "dragging": false
+ },
+ {
+ "id": "RecursiveCharacterTextSplitter-tR9QM",
+ "type": "genericNode",
+ "position": {
+ "x": 2791.013514133929,
+ "y": 1462.9588953494142
+ },
+ "data": {
+ "type": "RecursiveCharacterTextSplitter",
+ "node": {
+ "template": {
+ "inputs": {
+ "type": "Document",
+ "required": true,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "inputs",
+ "display_name": "Input",
+ "advanced": false,
+ "input_types": ["Document", "Record"],
+ "dynamic": false,
+ "info": "The texts to split.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "chunk_overlap": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 200,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_overlap",
+ "display_name": "Chunk Overlap",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The amount of overlap between chunks.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The maximum length of each chunk.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Optional\n\nfrom langchain_core.documents import Document\nfrom langchain_text_splitters import RecursiveCharacterTextSplitter\n\nfrom langflow.custom import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = None\n elif separators:\n # 
check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "separators": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "separators",
+ "display_name": "Separators",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": [""]
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "OpenAIEmbeddings-ZlOk1",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}",
- "target": "AstraDBSearch-41nRz",
- "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "embedding",
- "id": "AstraDBSearch-41nRz",
- "inputTypes": null,
- "type": "Embeddings"
- },
- "sourceHandle": {
- "baseClasses": [
- "Embeddings"
- ],
- "dataType": "OpenAIEmbeddings",
- "id": "OpenAIEmbeddings-ZlOk1"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}"
+ "description": "Split text into chunks of a specified length.",
+ "base_classes": ["Record"],
+ "display_name": "Recursive Character Text Splitter",
+ "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter",
+ "custom_fields": {
+ "inputs": null,
+ "separators": null,
+ "chunk_size": null,
+ "chunk_overlap": null
},
- {
- "source": "ChatInput-yxMKE",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}",
- "target": "AstraDBSearch-41nRz",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "AstraDBSearch-41nRz",
- "inputTypes": [
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Text",
- "str",
- "object",
- "Record"
- ],
- "dataType": "ChatInput",
- "id": "ChatInput-yxMKE"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "RecursiveCharacterTextSplitter-tR9QM"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 501,
+ "positionAbsolute": {
+ "x": 2791.013514133929,
+ "y": 1462.9588953494142
+ },
+ "dragging": false
+ },
+ {
+ "id": "AstraDBSearch-41nRz",
+ "type": "genericNode",
+ "position": {
+ "x": 1723.976434815103,
+ "y": 277.03317407245913
+ },
+ "data": {
+ "type": "AstraDBSearch",
+ "node": {
+ "template": {
+ "embedding": {
+ "type": "Embeddings",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding",
+ "display_name": "Embedding",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Embedding to use",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "input_value": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "input_value",
+ "display_name": "Input Value",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Input value to search",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "api_endpoint": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "api_endpoint",
+ "display_name": "API Endpoint",
+ "advanced": false,
+ "dynamic": false,
+ "info": "API endpoint URL for the Astra DB service.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "ASTRA_DB_API_ENDPOINT"
+ },
+ "batch_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "batch_size",
+ "display_name": "Batch Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional number of records to process in a single batch.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_delete_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_delete_concurrency",
+ "display_name": "Bulk Delete Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk delete operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_batch_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_batch_concurrency",
+ "display_name": "Bulk Insert Batch Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_overwrite_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_overwrite_concurrency",
+ "display_name": "Bulk Insert Overwrite Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": 
True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n 
bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_indexing_policy": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_indexing_policy",
+ "display_name": "Collection Indexing Policy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional dictionary defining the indexing policy for the collection.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_name",
+ "display_name": "Collection Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The name of the collection within Astra DB where the vectors will be stored.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "langflow"
+ },
+ "metadata_indexing_exclude": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_exclude",
+ "display_name": "Metadata Indexing Exclude",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to exclude from the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "metadata_indexing_include": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_include",
+ "display_name": "Metadata Indexing Include",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to include in the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "metric": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metric",
+ "display_name": "Metric",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional distance metric for vector comparisons in the vector store.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "namespace": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "namespace",
+ "display_name": "Namespace",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional namespace within Astra DB to use for the collection.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "number_of_results": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 4,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "number_of_results",
+ "display_name": "Number of Results",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Number of results to return.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "pre_delete_collection": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "pre_delete_collection",
+ "display_name": "Pre Delete Collection",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "search_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Similarity",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Similarity", "MMR"],
+ "name": "search_type",
+ "display_name": "Search Type",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "setup_mode": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Sync",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Sync", "Async", "Off"],
+ "name": "setup_mode",
+ "display_name": "Setup Mode",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "token": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "token",
+ "display_name": "Token",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Authentication token for accessing Astra DB.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "ASTRA_DB_APPLICATION_TOKEN"
+ },
+ "_type": "CustomComponent"
},
- {
- "source": "RecursiveCharacterTextSplitter-tR9QM",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}",
- "target": "AstraDB-eUCSS",
- "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "inputs",
- "id": "AstraDB-eUCSS",
- "inputTypes": null,
- "type": "Record"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "RecursiveCharacterTextSplitter",
- "id": "RecursiveCharacterTextSplitter-tR9QM"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}",
- "selected": false
+ "description": "Searches an existing Astra DB Vector Store.",
+ "icon": "AstraDB",
+ "base_classes": ["Record"],
+ "display_name": "Astra DB Search",
+ "documentation": "",
+ "custom_fields": {
+ "embedding": null,
+ "collection_name": null,
+ "input_value": null,
+ "token": null,
+ "api_endpoint": null,
+ "search_type": null,
+ "number_of_results": null,
+ "namespace": null,
+ "metric": null,
+ "batch_size": null,
+ "bulk_insert_batch_concurrency": null,
+ "bulk_insert_overwrite_concurrency": null,
+ "bulk_delete_concurrency": null,
+ "setup_mode": null,
+ "pre_delete_collection": null,
+ "metadata_indexing_include": null,
+ "metadata_indexing_exclude": null,
+ "collection_indexing_policy": null
},
- {
- "source": "OpenAIEmbeddings-9TPjc",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}",
- "target": "AstraDB-eUCSS",
- "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "embedding",
- "id": "AstraDB-eUCSS",
- "inputTypes": null,
- "type": "Embeddings"
- },
- "sourceHandle": {
- "baseClasses": [
- "Embeddings"
- ],
- "dataType": "OpenAIEmbeddings",
- "id": "OpenAIEmbeddings-9TPjc"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
- "selected": false
- },
- {
- "source": "AstraDBSearch-41nRz",
- "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}",
- "target": "TextOutput-BDknO",
- "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
- "data": {
- "targetHandle": {
- "fieldName": "input_value",
- "id": "TextOutput-BDknO",
- "inputTypes": [
- "Record",
- "Text"
- ],
- "type": "str"
- },
- "sourceHandle": {
- "baseClasses": [
- "Record"
- ],
- "dataType": "AstraDBSearch",
- "id": "AstraDBSearch-41nRz"
- }
- },
- "style": {
- "stroke": "#555"
- },
- "className": "stroke-gray-900 stroke-connection",
- "id": "reactflow__edge-AstraDBSearch-41nRz{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}-TextOutput-BDknO{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
- }
- ],
- "viewport": {
- "x": -259.6782520315529,
- "y": 90.3428735006047,
- "zoom": 0.2687057134854984
+ "output_types": ["Record"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "token",
+ "api_endpoint",
+ "collection_name",
+ "input_value",
+ "embedding"
+ ],
+ "beta": false
+ },
+ "id": "AstraDBSearch-41nRz"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 713,
+ "dragging": false,
+ "positionAbsolute": {
+ "x": 1723.976434815103,
+ "y": 277.03317407245913
}
- },
- "description": "Visit https://pre-release.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.",
- "name": "Vector Store RAG",
- "last_tested_version": "1.0.0a0",
- "is_component": false
+ },
+ {
+ "id": "AstraDB-eUCSS",
+ "type": "genericNode",
+ "position": {
+ "x": 3372.04958055989,
+ "y": 1611.0742035495277
+ },
+ "data": {
+ "type": "AstraDB",
+ "node": {
+ "template": {
+ "embedding": {
+ "type": "Embeddings",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding",
+ "display_name": "Embedding",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Embedding to use",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "inputs": {
+ "type": "Record",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "inputs",
+ "display_name": "Inputs",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Optional list of records to be processed and stored in the vector store.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "api_endpoint": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "api_endpoint",
+ "display_name": "API Endpoint",
+ "advanced": false,
+ "dynamic": false,
+ "info": "API endpoint URL for the Astra DB service.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "ASTRA_DB_API_ENDPOINT"
+ },
+ "batch_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "batch_size",
+ "display_name": "Batch Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional number of records to process in a single batch.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_delete_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_delete_concurrency",
+ "display_name": "Bulk Delete Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk delete operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_batch_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_batch_concurrency",
+ "display_name": "Bulk Insert Batch Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "bulk_insert_overwrite_concurrency": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "bulk_insert_overwrite_concurrency",
+ "display_name": "Bulk Insert Overwrite Concurrency",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional concurrency level for bulk insert operations that overwrite existing records.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import List, Optional, Union\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\nfrom langchain_core.retrievers import BaseRetriever\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n 
\"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: 
Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> Union[VectorStore, BaseRetriever]:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_indexing_policy": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_indexing_policy",
+ "display_name": "Collection Indexing Policy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional dictionary defining the indexing policy for the collection.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "collection_name": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "collection_name",
+ "display_name": "Collection Name",
+ "advanced": false,
+ "dynamic": false,
+ "info": "The name of the collection within Astra DB where the vectors will be stored.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "langflow"
+ },
+ "metadata_indexing_exclude": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_exclude",
+ "display_name": "Metadata Indexing Exclude",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to exclude from the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "metadata_indexing_include": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metadata_indexing_include",
+ "display_name": "Metadata Indexing Include",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional list of metadata fields to include in the indexing.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "metric": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "metric",
+ "display_name": "Metric",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional distance metric for vector comparisons in the vector store.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "namespace": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "namespace",
+ "display_name": "Namespace",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Optional namespace within Astra DB to use for the collection.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "pre_delete_collection": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "pre_delete_collection",
+ "display_name": "Pre Delete Collection",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Boolean flag to determine whether to delete the collection before creating a new one.",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "setup_mode": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "Sync",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": ["Sync", "Async", "Off"],
+ "name": "setup_mode",
+ "display_name": "Setup Mode",
+ "advanced": true,
+ "dynamic": false,
+ "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "token": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "token",
+ "display_name": "Token",
+ "advanced": false,
+ "dynamic": false,
+ "info": "Authentication token for accessing Astra DB.",
+ "load_from_db": true,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "ASTRA_DB_APPLICATION_TOKEN"
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Builds or loads an Astra DB Vector Store.",
+ "icon": "AstraDB",
+ "base_classes": ["VectorStore"],
+ "display_name": "Astra DB",
+ "documentation": "",
+ "custom_fields": {
+ "embedding": null,
+ "token": null,
+ "api_endpoint": null,
+ "collection_name": null,
+ "inputs": null,
+ "namespace": null,
+ "metric": null,
+ "batch_size": null,
+ "bulk_insert_batch_concurrency": null,
+ "bulk_insert_overwrite_concurrency": null,
+ "bulk_delete_concurrency": null,
+ "setup_mode": null,
+ "pre_delete_collection": null,
+ "metadata_indexing_include": null,
+ "metadata_indexing_exclude": null,
+ "collection_indexing_policy": null
+ },
+ "output_types": ["VectorStore"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [
+ "token",
+ "api_endpoint",
+ "collection_name",
+ "inputs",
+ "embedding"
+ ],
+ "beta": false
+ },
+ "id": "AstraDB-eUCSS"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 573,
+ "positionAbsolute": {
+ "x": 3372.04958055989,
+ "y": 1611.0742035495277
+ },
+ "dragging": false
+ },
+ {
+ "id": "OpenAIEmbeddings-9TPjc",
+ "type": "genericNode",
+ "position": {
+ "x": 2814.0402191223047,
+ "y": 1955.9268168273086
+ },
+ "data": {
+ "type": "OpenAIEmbeddings",
+ "node": {
+ "template": {
+ "allowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": [],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "allowed_special",
+ "display_name": "Allowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "chunk_size": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 1000,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "client": {
+ "type": "Any",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "client",
+ "display_name": "Client",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "code": {
+ "type": "code",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": true,
+ "value": "from typing import Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, NestedDict\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n 
\"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n 
deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "code",
+ "advanced": true,
+ "dynamic": true,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_headers": {
+ "type": "dict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_headers",
+ "display_name": "Default Headers",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "default_query": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "default_query",
+ "display_name": "Default Query",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "deployment": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "deployment",
+ "display_name": "Deployment",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "disallowed_special": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": ["all"],
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "disallowed_special",
+ "display_name": "Disallowed Special",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "embedding_ctx_length": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 8191,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "embedding_ctx_length",
+ "display_name": "Embedding Context Length",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "max_retries": {
+ "type": "int",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": 6,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "max_retries",
+ "display_name": "Max Retries",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "model": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": true,
+ "show": true,
+ "multiline": false,
+ "value": "text-embedding-ada-002",
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "options": [
+ "text-embedding-3-small",
+ "text-embedding-3-large",
+ "text-embedding-ada-002"
+ ],
+ "name": "model",
+ "display_name": "Model",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "model_kwargs": {
+ "type": "NestedDict",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": {},
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "model_kwargs",
+ "display_name": "Model Kwargs",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "openai_api_base": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_base",
+ "display_name": "OpenAI API Base",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_key": {
+ "type": "str",
+ "required": true,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_key",
+ "display_name": "OpenAI API Key",
+ "advanced": false,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"],
+ "value": "OPENAI_API_KEY"
+ },
+ "openai_api_type": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": true,
+ "name": "openai_api_type",
+ "display_name": "OpenAI API Type",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_api_version": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_api_version",
+ "display_name": "OpenAI API Version",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_organization": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_organization",
+ "display_name": "OpenAI Organization",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "openai_proxy": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "openai_proxy",
+ "display_name": "OpenAI Proxy",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "request_timeout": {
+ "type": "float",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "request_timeout",
+ "display_name": "Request Timeout",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "rangeSpec": {
+ "step_type": "float",
+ "min": -1,
+ "max": 1,
+ "step": 0.1
+ },
+ "load_from_db": false,
+ "title_case": false
+ },
+ "show_progress_bar": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "show_progress_bar",
+ "display_name": "Show Progress Bar",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "skip_empty": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "skip_empty",
+ "display_name": "Skip Empty",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_enable": {
+ "type": "bool",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "value": true,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_enable",
+ "display_name": "TikToken Enable",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false
+ },
+ "tiktoken_model_name": {
+ "type": "str",
+ "required": false,
+ "placeholder": "",
+ "list": false,
+ "show": true,
+ "multiline": false,
+ "fileTypes": [],
+ "file_path": "",
+ "password": false,
+ "name": "tiktoken_model_name",
+ "display_name": "TikToken Model Name",
+ "advanced": true,
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "title_case": false,
+ "input_types": ["Text"]
+ },
+ "_type": "CustomComponent"
+ },
+ "description": "Generate embeddings using OpenAI models.",
+ "base_classes": ["Embeddings"],
+ "display_name": "OpenAI Embeddings",
+ "documentation": "",
+ "custom_fields": {
+ "openai_api_key": null,
+ "default_headers": null,
+ "default_query": null,
+ "allowed_special": null,
+ "disallowed_special": null,
+ "chunk_size": null,
+ "client": null,
+ "deployment": null,
+ "embedding_ctx_length": null,
+ "max_retries": null,
+ "model": null,
+ "model_kwargs": null,
+ "openai_api_base": null,
+ "openai_api_type": null,
+ "openai_api_version": null,
+ "openai_organization": null,
+ "openai_proxy": null,
+ "request_timeout": null,
+ "show_progress_bar": null,
+ "skip_empty": null,
+ "tiktoken_enable": null,
+ "tiktoken_model_name": null
+ },
+ "output_types": ["Embeddings"],
+ "field_formatters": {},
+ "frozen": false,
+ "field_order": [],
+ "beta": false
+ },
+ "id": "OpenAIEmbeddings-9TPjc"
+ },
+ "selected": false,
+ "width": 384,
+ "height": 383,
+ "positionAbsolute": {
+ "x": 2814.0402191223047,
+ "y": 1955.9268168273086
+ },
+ "dragging": false
+ }
+ ],
+ "edges": [
+ {
+ "source": "TextOutput-BDknO",
+ "target": "Prompt-xeI6K",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-TextOutput-BDknO{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "context",
+ "id": "Prompt-xeI6K",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "TextOutput",
+ "id": "TextOutput-BDknO"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "ChatInput-yxMKE",
+ "target": "Prompt-xeI6K",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "question",
+ "id": "Prompt-xeI6K",
+ "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Text", "str", "object", "Record"],
+ "dataType": "ChatInput",
+ "id": "ChatInput-yxMKE"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "Prompt-xeI6K",
+ "target": "OpenAIModel-EjXlN",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-Prompt-xeI6K{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}-OpenAIModel-EjXlN{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "OpenAIModel-EjXlN",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "Prompt",
+ "id": "Prompt-xeI6K"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIModel-EjXlN",
+ "target": "ChatOutput-Q39I8",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "id": "reactflow__edge-OpenAIModel-EjXlN{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}-ChatOutput-Q39I8{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-Q39I8",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["object", "Text", "str"],
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-EjXlN"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "File-t0a6a",
+ "target": "RecursiveCharacterTextSplitter-tR9QM",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}",
+ "id": "reactflow__edge-File-t0a6a{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}-RecursiveCharacterTextSplitter-tR9QM{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "inputs",
+ "id": "RecursiveCharacterTextSplitter-tR9QM",
+ "inputTypes": ["Document", "Record"],
+ "type": "Document"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "File",
+ "id": "File-t0a6a"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "selected": false
+ },
+ {
+ "source": "OpenAIEmbeddings-ZlOk1",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}",
+ "target": "AstraDBSearch-41nRz",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "embedding",
+ "id": "AstraDBSearch-41nRz",
+ "inputTypes": null,
+ "type": "Embeddings"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Embeddings"],
+ "dataType": "OpenAIEmbeddings",
+ "id": "OpenAIEmbeddings-ZlOk1"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}"
+ },
+ {
+ "source": "ChatInput-yxMKE",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}",
+ "target": "AstraDBSearch-41nRz",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "AstraDBSearch-41nRz",
+ "inputTypes": ["Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Text", "str", "object", "Record"],
+ "dataType": "ChatInput",
+ "id": "ChatInput-yxMKE"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ },
+ {
+ "source": "RecursiveCharacterTextSplitter-tR9QM",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}",
+ "target": "AstraDB-eUCSS",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "inputs",
+ "id": "AstraDB-eUCSS",
+ "inputTypes": null,
+ "type": "Record"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "RecursiveCharacterTextSplitter",
+ "id": "RecursiveCharacterTextSplitter-tR9QM"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}",
+ "selected": false
+ },
+ {
+ "source": "OpenAIEmbeddings-9TPjc",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}",
+ "target": "AstraDB-eUCSS",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "embedding",
+ "id": "AstraDB-eUCSS",
+ "inputTypes": null,
+ "type": "Embeddings"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Embeddings"],
+ "dataType": "OpenAIEmbeddings",
+ "id": "OpenAIEmbeddings-9TPjc"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}",
+ "selected": false
+ },
+ {
+ "source": "AstraDBSearch-41nRz",
+ "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}",
+ "target": "TextOutput-BDknO",
+ "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}",
+ "data": {
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "TextOutput-BDknO",
+ "inputTypes": ["Record", "Text"],
+ "type": "str"
+ },
+ "sourceHandle": {
+ "baseClasses": ["Record"],
+ "dataType": "AstraDBSearch",
+ "id": "AstraDBSearch-41nRz"
+ }
+ },
+ "style": {
+ "stroke": "#555"
+ },
+ "className": "stroke-gray-900 stroke-connection",
+ "id": "reactflow__edge-AstraDBSearch-41nRz{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}-TextOutput-BDknO{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}"
+ }
+ ],
+ "viewport": {
+ "x": -259.6782520315529,
+ "y": 90.3428735006047,
+ "zoom": 0.2687057134854984
+ }
+ },
+ "description": "Visit https://pre-release.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project gives you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component.\n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.",
+ "name": "Vector Store RAG",
+ "last_tested_version": "1.0.0a0",
+ "is_component": false
}
diff --git a/src/backend/base/langflow/interface/agents/__init__.py b/src/backend/base/langflow/interface/agents/__init__.py
deleted file mode 100644
index df15bc39b..000000000
--- a/src/backend/base/langflow/interface/agents/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from langflow.interface.agents.base import AgentCreator
-
-__all__ = ["AgentCreator"]
diff --git a/src/backend/base/langflow/interface/agents/base.py b/src/backend/base/langflow/interface/agents/base.py
deleted file mode 100644
index ee510580c..000000000
--- a/src/backend/base/langflow/interface/agents/base.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from typing import ClassVar, Dict, List, Optional
-
-from langchain.agents import types
-from langflow.interface.agents.custom import CUSTOM_AGENTS
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.utils import build_template_from_class
-from langflow.legacy_custom.customs import get_custom_nodes
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.agents import AgentFrontendNode
-from langflow.utils.util import build_template_from_method
-from loguru import logger
-
-
-class AgentCreator(LangChainTypeCreator):
- type_name: str = "agents"
-
- from_method_nodes: ClassVar[Dict] = {"ZeroShotAgent": "from_llm_and_tools"}
-
- @property
- def frontend_node_class(self) -> type[AgentFrontendNode]:
- return AgentFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- self.type_dict = types.AGENT_TO_CLASS
- # Add JsonAgent to the list of agents
- for name, agent in CUSTOM_AGENTS.items():
- # TODO: validate AgentType
- self.type_dict[name] = agent # type: ignore
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- try:
- if name in get_custom_nodes(self.type_name).keys():
- return get_custom_nodes(self.type_name)[name]
- elif name in self.from_method_nodes:
- return build_template_from_method(
- name,
- type_to_cls_dict=self.type_to_loader_dict,
- add_function=True,
- method_name=self.from_method_nodes[name],
- )
- return build_template_from_class(name, self.type_to_loader_dict, add_function=True)
- except ValueError as exc:
- raise ValueError("Agent not found") from exc
- except AttributeError as exc:
- logger.error(f"Agent {name} not loaded: {exc}")
- return None
-
- # Now this is a generator
- def to_list(self) -> List[str]:
- names = []
- settings_service = get_settings_service()
- for _, agent in self.type_to_loader_dict.items():
- agent_name = agent.function_name() if hasattr(agent, "function_name") else agent.__name__
- if agent_name in settings_service.settings.AGENTS or settings_service.settings.DEV:
- names.append(agent_name)
- return names
-
-
-agent_creator = AgentCreator()
diff --git a/src/backend/base/langflow/interface/agents/custom.py b/src/backend/base/langflow/interface/agents/custom.py
deleted file mode 100644
index 680bc9bf8..000000000
--- a/src/backend/base/langflow/interface/agents/custom.py
+++ /dev/null
@@ -1,269 +0,0 @@
-from typing import Any, Optional
-
-from langchain.agents import AgentExecutor, ZeroShotAgent
-from langchain.agents.agent_toolkits import VectorStoreInfo, VectorStoreRouterToolkit, VectorStoreToolkit
-from langchain.agents.agent_toolkits.vectorstore.prompt import PREFIX as VECTORSTORE_PREFIX
-from langchain.agents.agent_toolkits.vectorstore.prompt import ROUTER_PREFIX as VECTORSTORE_ROUTER_PREFIX
-from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
-from langchain.base_language import BaseLanguageModel
-from langchain.chains.llm import LLMChain
-from langchain_community.utilities import SQLDatabase
-from langchain.tools.sql_database.prompt import QUERY_CHECKER
-from langchain_community.agent_toolkits import SQLDatabaseToolkit
-from langchain_community.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
-from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
-from langchain_community.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX
-from langchain_experimental.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX
-from langchain_experimental.agents.agent_toolkits.pandas.prompt import SUFFIX_WITH_DF as PANDAS_SUFFIX
-from langchain_experimental.tools.python.tool import PythonAstREPLTool
-
-from langflow.interface.base import CustomAgentExecutor
-
-
-class JsonAgent(CustomAgentExecutor):
- """Json agent"""
-
- @staticmethod
- def function_name():
- return "JsonAgent"
-
- @classmethod
- def initialize(cls, *args, **kwargs):
- return cls.from_toolkit_and_llm(*args, **kwargs)
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- @classmethod
- def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
- tools = toolkit if isinstance(toolkit, list) else toolkit.get_tools()
- tool_names = list({tool.name for tool in tools})
- prompt = ZeroShotAgent.create_prompt(
- tools,
- prefix=JSON_PREFIX,
- suffix=JSON_SUFFIX,
- format_instructions=FORMAT_INSTRUCTIONS,
- input_variables=None,
- )
- llm_chain = LLMChain(
- llm=llm,
- prompt=prompt,
- )
- agent = ZeroShotAgent(
- llm_chain=llm_chain,
- allowed_tools=tool_names, # type: ignore
- )
- return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
-
-
-class CSVAgent(CustomAgentExecutor):
- """CSV agent"""
-
- @staticmethod
- def function_name():
- return "CSVAgent"
-
- @classmethod
- def initialize(cls, *args, **kwargs):
- return cls.from_toolkit_and_llm(*args, **kwargs)
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- @classmethod
- def from_toolkit_and_llm(
- cls, path: str, llm: BaseLanguageModel, pandas_kwargs: Optional[dict] = None, **kwargs: Any
- ):
- import pandas as pd # type: ignore
-
- _kwargs = pandas_kwargs or {}
- df = pd.read_csv(path, **_kwargs)
-
- tools = [PythonAstREPLTool(locals={"df": df})] # type: ignore
- prompt = ZeroShotAgent.create_prompt(
- tools,
- prefix=PANDAS_PREFIX,
- suffix=PANDAS_SUFFIX,
- input_variables=["df_head", "input", "agent_scratchpad"],
- )
- partial_prompt = prompt.partial(df_head=str(df.head()))
- llm_chain = LLMChain(
- llm=llm,
- prompt=partial_prompt,
- )
- tool_names = list({tool.name for tool in tools})
- agent = ZeroShotAgent(
- llm_chain=llm_chain,
- allowed_tools=tool_names,
- **kwargs, # type: ignore
- )
-
- return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
-
-
-class VectorStoreAgent(CustomAgentExecutor):
- """Vector store agent"""
-
- @staticmethod
- def function_name():
- return "VectorStoreAgent"
-
- @classmethod
- def initialize(cls, *args, **kwargs):
- return cls.from_toolkit_and_llm(*args, **kwargs)
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- @classmethod
- def from_toolkit_and_llm(cls, llm: BaseLanguageModel, vectorstoreinfo: VectorStoreInfo, **kwargs: Any):
- """Construct a vectorstore agent from an LLM and tools."""
-
- toolkit = VectorStoreToolkit(vectorstore_info=vectorstoreinfo, llm=llm)
-
- tools = toolkit.get_tools()
- prompt = ZeroShotAgent.create_prompt(tools, prefix=VECTORSTORE_PREFIX)
- llm_chain = LLMChain(
- llm=llm,
- prompt=prompt,
- )
- tool_names = list({tool.name for tool in tools})
- agent = ZeroShotAgent(
- llm_chain=llm_chain,
- allowed_tools=tool_names,
- **kwargs, # type: ignore
- )
- return AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, verbose=True, handle_parsing_errors=True)
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
-
-
-class SQLAgent(CustomAgentExecutor):
- """SQL agent"""
-
- @staticmethod
- def function_name():
- return "SQLAgent"
-
- @classmethod
- def initialize(cls, *args, **kwargs):
- return cls.from_toolkit_and_llm(*args, **kwargs)
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- @classmethod
- def from_toolkit_and_llm(cls, llm: BaseLanguageModel, database_uri: str, **kwargs: Any):
- """Construct an SQL agent from an LLM and tools."""
- db = SQLDatabase.from_uri(database_uri)
- toolkit = SQLDatabaseToolkit(db=db, llm=llm)
-
- # The right code should be this, but there is a problem with tools = toolkit.get_tools()
- # related to `OPENAI_API_KEY`
- # return create_sql_agent(llm=llm, toolkit=toolkit, verbose=True)
- from langchain.prompts import PromptTemplate
- from langchain.tools.sql_database.tool import (
- InfoSQLDatabaseTool,
- ListSQLDatabaseTool,
- QuerySQLCheckerTool,
- QuerySQLDataBaseTool,
- )
-
- llmchain = LLMChain(
- llm=llm,
- prompt=PromptTemplate(template=QUERY_CHECKER, input_variables=["query", "dialect"]),
- )
-
- tools = [
- QuerySQLDataBaseTool(db=db), # type: ignore
- InfoSQLDatabaseTool(db=db), # type: ignore
- ListSQLDatabaseTool(db=db), # type: ignore
- QuerySQLCheckerTool(db=db, llm_chain=llmchain, llm=llm), # type: ignore
- ]
-
- prefix = SQL_PREFIX.format(dialect=toolkit.dialect, top_k=10)
- prompt = ZeroShotAgent.create_prompt(
- tools=tools, # type: ignore
- prefix=prefix,
- suffix=SQL_SUFFIX,
- format_instructions=FORMAT_INSTRUCTIONS,
- )
- llm_chain = LLMChain(
- llm=llm,
- prompt=prompt,
- )
- tool_names = list({tool.name for tool in tools}) # type: ignore
- agent = ZeroShotAgent(
- llm_chain=llm_chain,
- allowed_tools=tool_names,
- **kwargs, # type: ignore
- )
- return AgentExecutor.from_agent_and_tools(
- agent=agent,
- tools=tools, # type: ignore
- verbose=True,
- max_iterations=15,
- early_stopping_method="force",
- handle_parsing_errors=True,
- )
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
-
-
-class VectorStoreRouterAgent(CustomAgentExecutor):
- """Vector Store Router Agent"""
-
- @staticmethod
- def function_name():
- return "VectorStoreRouterAgent"
-
- @classmethod
- def initialize(cls, *args, **kwargs):
- return cls.from_toolkit_and_llm(*args, **kwargs)
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- @classmethod
- def from_toolkit_and_llm(
- cls, llm: BaseLanguageModel, vectorstoreroutertoolkit: VectorStoreRouterToolkit, **kwargs: Any
- ):
- """Construct a vector store router agent from an LLM and tools."""
-
- tools = (
- vectorstoreroutertoolkit
- if isinstance(vectorstoreroutertoolkit, list)
- else vectorstoreroutertoolkit.get_tools()
- )
- prompt = ZeroShotAgent.create_prompt(tools, prefix=VECTORSTORE_ROUTER_PREFIX)
- llm_chain = LLMChain(
- llm=llm,
- prompt=prompt,
- )
- tool_names = list({tool.name for tool in tools})
- agent = ZeroShotAgent(
- llm_chain=llm_chain,
- allowed_tools=tool_names,
- **kwargs, # type: ignore
- )
- return AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, verbose=True, handle_parsing_errors=True)
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
-
-
-CUSTOM_AGENTS = {
- "JsonAgent": JsonAgent,
- "CSVAgent": CSVAgent,
- "VectorStoreAgent": VectorStoreAgent,
- "VectorStoreRouterAgent": VectorStoreRouterAgent,
- "SQLAgent": SQLAgent,
-}
diff --git a/src/backend/base/langflow/interface/agents/prebuilt.py b/src/backend/base/langflow/interface/agents/prebuilt.py
deleted file mode 100644
index ec4799a81..000000000
--- a/src/backend/base/langflow/interface/agents/prebuilt.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from langchain.chains.llm import LLMChain
-from langchain.agents import AgentExecutor, ZeroShotAgent
-from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
-from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
-from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
-from langchain.base_language import BaseLanguageModel
-
-
-class MalfoyAgent(AgentExecutor):
- """Json agent"""
-
- prefix = "Malfoy: "
-
- @classmethod
- def initialize(cls, *args, **kwargs):
- return cls.from_toolkit_and_llm(*args, **kwargs)
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- @classmethod
- def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
- tools = toolkit.get_tools()
- tool_names = {tool.name for tool in tools}
- prompt = ZeroShotAgent.create_prompt(
- tools,
- prefix=JSON_PREFIX,
- suffix=JSON_SUFFIX,
- format_instructions=FORMAT_INSTRUCTIONS,
- input_variables=None,
- )
- llm_chain = LLMChain(
- llm=llm,
- prompt=prompt,
- )
- agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names) # type: ignore
- return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
-
-
-PREBUILT_AGENTS = {
- "MalfoyAgent": MalfoyAgent,
-}
diff --git a/src/backend/base/langflow/interface/base.py b/src/backend/base/langflow/interface/base.py
deleted file mode 100644
index a300f12f2..000000000
--- a/src/backend/base/langflow/interface/base.py
+++ /dev/null
@@ -1,139 +0,0 @@
-from abc import ABC, abstractmethod
-from typing import Any, Dict, List, Optional, Type, Union
-
-from langchain.agents import AgentExecutor
-from langchain.chains.base import Chain
-from loguru import logger
-from pydantic import BaseModel
-
-from langflow.services.deps import get_settings_service
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.template.base import Template
-
-# Assuming necessary imports for Field, Template, and FrontendNode classes
-
-
-class LangChainTypeCreator(BaseModel, ABC):
- type_name: str
- type_dict: Optional[Dict] = None
- name_docs_dict: Optional[Dict[str, str]] = None
-
- @property
- def frontend_node_class(self) -> Type[FrontendNode]:
- """The class type of the FrontendNode created in frontend_node."""
- return FrontendNode
-
- @property
- def docs_map(self) -> Dict[str, str]:
- """A dict with the name of the component as key and the documentation link as value."""
- settings_service = get_settings_service()
- if self.name_docs_dict is None:
- try:
- type_settings = getattr(settings_service.settings, self.type_name.upper())
- self.name_docs_dict = {name: value_dict["documentation"] for name, value_dict in type_settings.items()}
- except AttributeError as exc:
- logger.error(f"Error getting settings for {self.type_name}: {exc}")
-
- self.name_docs_dict = {}
- return self.name_docs_dict
-
- @property
- @abstractmethod
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- raise NotImplementedError
- return self.type_dict
-
- @abstractmethod
- def get_signature(self, name: str) -> Union[Optional[Dict[Any, Any]], FrontendNode]:
- pass
-
- @abstractmethod
- def to_list(self) -> List[str]:
- pass
-
- def to_dict(self) -> Dict:
- result: Dict = {self.type_name: {}}
-
- for name in self.to_list():
- # frontend_node.to_dict() returns a dict with the following structure:
- # {name: {template: {fields}, description: str}}
- # so we should update the result dict
- node = self.frontend_node(name)
- if node is not None:
- node = node.to_dict() # type: ignore
- result[self.type_name].update(node)
-
- return result
-
- def frontend_node(self, name) -> Union[FrontendNode, None]:
- signature = self.get_signature(name)
- if signature is None:
- logger.error(f"Node {name} not loaded")
- return signature
- if not isinstance(signature, FrontendNode):
- fields = [
- TemplateField(
- name=key,
- field_type=value["type"],
- required=value.get("required", False),
- placeholder=value.get("placeholder", ""),
- is_list=value.get("list", False),
- show=value.get("show", True),
- multiline=value.get("multiline", False),
- value=value.get("value", None),
- file_types=value.get("fileTypes", []),
- file_path=value.get("file_path", None),
- )
- for key, value in signature["template"].items()
- if key != "_type"
- ]
- template = Template(type_name=name, fields=fields)
- signature = self.frontend_node_class(
- template=template,
- description=signature.get("description", ""),
- base_classes=signature["base_classes"],
- name=name,
- )
-
- signature.add_extra_fields()
- signature.add_extra_base_classes()
- signature.set_documentation(self.docs_map.get(name, ""))
- return signature
-
-
-class CustomChain(Chain, ABC):
- """Custom chain"""
-
- @staticmethod
- def function_name():
- return "CustomChain"
-
- @classmethod
- def initialize(cls, *args, **kwargs):
- pass
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
-
-
-class CustomAgentExecutor(AgentExecutor, ABC):
- """Custom chain"""
-
- @staticmethod
- def function_name():
- return "CustomChain"
-
- @classmethod
- def initialize(cls, *args, **kwargs):
- pass
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
diff --git a/src/backend/base/langflow/interface/chains/__init__.py b/src/backend/base/langflow/interface/chains/__init__.py
deleted file mode 100644
index 2e5570b3c..000000000
--- a/src/backend/base/langflow/interface/chains/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from langflow.interface.chains.base import ChainCreator
-
-__all__ = ["ChainCreator"]
diff --git a/src/backend/base/langflow/interface/chains/base.py b/src/backend/base/langflow/interface/chains/base.py
deleted file mode 100644
index e69b93614..000000000
--- a/src/backend/base/langflow/interface/chains/base.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from typing import Any, ClassVar, Dict, List, Optional, Type
-
-from langchain import chains
-from langchain_experimental.sql import SQLDatabaseChain
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.importing.utils import import_class
-from langflow.interface.utils import build_template_from_class
-from langflow.legacy_custom.customs import get_custom_nodes
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.chains import ChainFrontendNode
-from langflow.utils.util import build_template_from_method
-from loguru import logger
-
-# Assuming necessary imports for Field, Template, and FrontendNode classes
-
-
-class ChainCreator(LangChainTypeCreator):
- type_name: str = "chains"
-
- @property
- def frontend_node_class(self) -> Type[ChainFrontendNode]:
- return ChainFrontendNode
-
- #! We need to find a better solution for this
- from_method_nodes: ClassVar[Dict] = {
- "ConversationalRetrievalChain": "from_llm",
- "LLMCheckerChain": "from_llm",
- "SQLDatabaseChain": "from_llm",
- }
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- settings_service = get_settings_service()
- self.type_dict: dict[str, Any] = {
- chain_name: import_class(f"langchain.chains.{chain_name}") for chain_name in chains.__all__
- }
- from langflow.interface.chains.custom import CUSTOM_CHAINS
-
- self.type_dict["SQLDatabaseChain"] = SQLDatabaseChain
-
- self.type_dict.update(CUSTOM_CHAINS)
- # Filter according to settings.chains
- self.type_dict = {
- name: chain
- for name, chain in self.type_dict.items()
- if name in settings_service.settings.CHAINS or settings_service.settings.DEV
- }
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- try:
- if name in get_custom_nodes(self.type_name).keys():
- return get_custom_nodes(self.type_name)[name]
- elif name in self.from_method_nodes.keys():
- return build_template_from_method(
- name,
- type_to_cls_dict=self.type_to_loader_dict,
- method_name=self.from_method_nodes[name],
- add_function=True,
- )
- return build_template_from_class(name, self.type_to_loader_dict, add_function=True)
- except ValueError as exc:
- raise ValueError(f"Chain {name} not found: {exc}") from exc
- except AttributeError as exc:
- logger.error(f"Chain {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- names = []
- for _, chain in self.type_to_loader_dict.items():
- chain_name = chain.function_name() if hasattr(chain, "function_name") else chain.__name__
- names.append(chain_name)
- return names
-
-
-chain_creator = ChainCreator()
diff --git a/src/backend/base/langflow/interface/chains/custom.py b/src/backend/base/langflow/interface/chains/custom.py
deleted file mode 100644
index 2a72f3471..000000000
--- a/src/backend/base/langflow/interface/chains/custom.py
+++ /dev/null
@@ -1,119 +0,0 @@
-from typing import Dict, Optional, Type, Union
-
-from langchain.base_language import BaseLanguageModel
-from langchain.chains import ConversationChain
-from langchain.chains.question_answering import load_qa_chain
-from langchain.memory.buffer import ConversationBufferMemory
-from langchain.schema import BaseMemory
-from pydantic.v1 import Field, root_validator
-
-from langflow.interface.base import CustomChain
-from langflow.interface.utils import extract_input_variables_from_prompt
-
-DEFAULT_SUFFIX = """"
-Current conversation:
-{history}
-Human: {input}
-{ai_prefix}"""
-
-
-class BaseCustomConversationChain(ConversationChain):
- """BaseCustomChain is a chain you can use to have a conversation with a custom character."""
-
- template: Optional[str]
-
- ai_prefix_value: Optional[str]
- """Field to use as the ai_prefix. It needs to be set and has to be in the template"""
-
- @root_validator(pre=False)
- def build_template(cls, values):
- format_dict = {}
- input_variables = extract_input_variables_from_prompt(values["template"])
-
- if values.get("ai_prefix_value", None) is None:
- values["ai_prefix_value"] = values["memory"].ai_prefix
-
- for key in input_variables:
- new_value = values.get(key, f"{{{key}}}")
- format_dict[key] = new_value
- if key == values.get("ai_prefix_value", None):
- values["memory"].ai_prefix = new_value
-
- values["template"] = values["template"].format(**format_dict)
-
- values["template"] = values["template"]
- values["input_variables"] = extract_input_variables_from_prompt(values["template"])
- values["prompt"].template = values["template"]
- values["prompt"].input_variables = values["input_variables"]
- return values
-
-
-class SeriesCharacterChain(BaseCustomConversationChain):
- """SeriesCharacterChain is a chain you can use to have a conversation with a character from a series."""
-
- character: str
- series: str
- template: Optional[str] = """I want you to act like {character} from {series}.
-I want you to respond and answer like {character}. do not write any explanations. only answer like {character}.
-You must know all of the knowledge of {character}.
-Current conversation:
-{history}
-Human: {input}
-{character}:"""
- memory: BaseMemory = Field(default_factory=ConversationBufferMemory)
- ai_prefix_value: Optional[str] = "character"
- """Default memory store."""
-
-
-class MidJourneyPromptChain(BaseCustomConversationChain):
- """MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts."""
-
- template: Optional[
- str
- ] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program.
- Your job is to provide detailed and creative descriptions that will inspire unique and interesting images from the AI.
- Keep in mind that the AI is capable of understanding a wide range of language and can interpret abstract concepts, so feel free to be as imaginative and descriptive as possible.
- For example, you could describe a scene from a futuristic city, or a surreal landscape filled with strange creatures.
- The more detailed and imaginative your description, the more interesting the resulting image will be. Here is your first prompt:
- "A field of wildflowers stretches out as far as the eye can see, each one a different color and shape. In the distance, a massive tree towers over the landscape, its branches reaching up to the sky like tentacles.\"
-
- Current conversation:
- {history}
- Human: {input}
- AI:""" # noqa: E501
-
-
-class TimeTravelGuideChain(BaseCustomConversationChain):
- template: Optional[
- str
- ] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information.
- Current conversation:
- {history}
- Human: {input}
- AI:""" # noqa: E501
-
-
-class CombineDocsChain(CustomChain):
- """Implementation of load_qa_chain function"""
-
- @staticmethod
- def function_name():
- return "load_qa_chain"
-
- @classmethod
- def initialize(cls, llm: BaseLanguageModel, chain_type: str):
- return load_qa_chain(llm=llm, chain_type=chain_type)
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
-
- def run(self, *args, **kwargs):
- return super().run(*args, **kwargs)
-
-
-CUSTOM_CHAINS: Dict[str, Type[Union[ConversationChain, CustomChain]]] = {
- "CombineDocsChain": CombineDocsChain,
- "SeriesCharacterChain": SeriesCharacterChain,
- "MidJourneyPromptChain": MidJourneyPromptChain,
- "TimeTravelGuideChain": TimeTravelGuideChain,
-}
diff --git a/src/backend/base/langflow/interface/custom/__init__.py b/src/backend/base/langflow/interface/custom/__init__.py
deleted file mode 100644
index 5b87e9fa3..000000000
--- a/src/backend/base/langflow/interface/custom/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from langflow.interface.custom.base import CustomComponentCreator
-from langflow.interface.custom.custom_component import CustomComponent
-
-__all__ = ["CustomComponentCreator", "CustomComponent"]
diff --git a/src/backend/base/langflow/interface/custom/base.py b/src/backend/base/langflow/interface/custom/base.py
deleted file mode 100644
index 573eacba1..000000000
--- a/src/backend/base/langflow/interface/custom/base.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from typing import Any, Dict, List, Optional, Type
-
-from loguru import logger
-
-from langflow.interface.base import LangChainTypeCreator
-
-# from langflow.interface.custom.custom import CustomComponent
-from langflow.interface.custom.custom_component import CustomComponent
-from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode
-
-
-class CustomComponentCreator(LangChainTypeCreator):
- type_name: str = "custom_components"
-
- @property
- def frontend_node_class(self) -> Type[CustomComponentFrontendNode]:
- return CustomComponentFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- self.type_dict: dict[str, Any] = {
- "CustomComponent": CustomComponent,
- }
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- from langflow.legacy_custom.customs import get_custom_nodes
-
- try:
- if name in get_custom_nodes(self.type_name).keys():
- return get_custom_nodes(self.type_name)[name]
- except ValueError as exc:
- raise ValueError(f"CustomComponent {name} not found: {exc}") from exc
- except AttributeError as exc:
- logger.error(f"CustomComponent {name} not loaded: {exc}")
- return None
- return None
-
- def to_list(self) -> List[str]:
- return list(self.type_to_loader_dict.keys())
-
-
-custom_component_creator = CustomComponentCreator()
diff --git a/src/backend/base/langflow/interface/custom_lists.py b/src/backend/base/langflow/interface/custom_lists.py
deleted file mode 100644
index 9b494e450..000000000
--- a/src/backend/base/langflow/interface/custom_lists.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import inspect
-from typing import Any
-
-from langchain import llms, memory, text_splitter
-from langchain_community import agent_toolkits, document_loaders, embeddings
-from langchain_community.chat_models import AzureChatOpenAI, ChatAnthropic, ChatOpenAI, ChatVertexAI
-
-from langflow.interface.agents.custom import CUSTOM_AGENTS
-from langflow.interface.chains.custom import CUSTOM_CHAINS
-from langflow.interface.importing.utils import import_class
-
-# LLMs
-llm_type_to_cls_dict = {}
-
-for k, v in llms.get_type_to_cls_dict().items():
- try:
- llm_type_to_cls_dict[k] = v()
- except Exception:
- pass
-llm_type_to_cls_dict["anthropic-chat"] = ChatAnthropic # type: ignore
-llm_type_to_cls_dict["azure-chat"] = AzureChatOpenAI # type: ignore
-llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore
-llm_type_to_cls_dict["vertexai-chat"] = ChatVertexAI # type: ignore
-
-
-# Toolkits
-toolkit_type_to_loader_dict: dict[str, Any] = {
- toolkit_name: import_class(f"langchain_community.agent_toolkits.{toolkit_name}")
- # if toolkit_name is lower case it is a loader
- for toolkit_name in agent_toolkits.__all__
- if toolkit_name.islower()
-}
-
-toolkit_type_to_cls_dict: dict[str, Any] = {
- toolkit_name: import_class(f"langchain_community.agent_toolkits.{toolkit_name}")
- # if toolkit_name is not lower case it is a class
- for toolkit_name in agent_toolkits.__all__
- if not toolkit_name.islower()
-}
-
-# Memories
-memory_type_to_cls_dict: dict[str, Any] = {
- memory_name: import_class(f"langchain.memory.{memory_name}") for memory_name in memory.__all__
-}
-
-
-# Embeddings
-embedding_type_to_cls_dict: dict[str, Any] = {
- embedding_name: import_class(f"langchain_community.embeddings.{embedding_name}")
- for embedding_name in embeddings.__all__
-}
-
-
-# Document Loaders
-documentloaders_type_to_cls_dict: dict[str, Any] = {
- documentloader_name: import_class(f"langchain_community.document_loaders.{documentloader_name}")
- for documentloader_name in document_loaders.__all__
-}
-
-# Text Splitters
-textsplitter_type_to_cls_dict: dict[str, Any] = dict(inspect.getmembers(text_splitter, inspect.isclass))
-
-# merge CUSTOM_AGENTS and CUSTOM_CHAINS
-CUSTOM_NODES = {**CUSTOM_AGENTS, **CUSTOM_CHAINS} # type: ignore
diff --git a/src/backend/base/langflow/interface/document_loaders/__init__.py b/src/backend/base/langflow/interface/document_loaders/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/backend/base/langflow/interface/document_loaders/base.py b/src/backend/base/langflow/interface/document_loaders/base.py
deleted file mode 100644
index 11bf0db42..000000000
--- a/src/backend/base/langflow/interface/document_loaders/base.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from typing import Dict, List, Optional, Type
-
-from loguru import logger
-
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
-from langflow.interface.utils import build_template_from_class
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode
-
-
-class DocumentLoaderCreator(LangChainTypeCreator):
- type_name: str = "documentloaders"
-
- @property
- def frontend_node_class(self) -> Type[DocumentLoaderFrontNode]:
- return DocumentLoaderFrontNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- return documentloaders_type_to_cls_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of a document loader."""
- try:
- return build_template_from_class(name, documentloaders_type_to_cls_dict)
- except ValueError as exc:
- raise ValueError(f"Documment Loader {name} not found") from exc
- except AttributeError as exc:
- logger.error(f"Documment Loader {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- settings_service = get_settings_service()
- return [
- documentloader.__name__
- for documentloader in self.type_to_loader_dict.values()
- if documentloader.__name__ in settings_service.settings.DOCUMENTLOADERS or settings_service.settings.DEV
- ]
-
-
-documentloader_creator = DocumentLoaderCreator()
diff --git a/src/backend/base/langflow/interface/embeddings/__init__.py b/src/backend/base/langflow/interface/embeddings/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/backend/base/langflow/interface/embeddings/base.py b/src/backend/base/langflow/interface/embeddings/base.py
deleted file mode 100644
index 5fd7ad3b0..000000000
--- a/src/backend/base/langflow/interface/embeddings/base.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from typing import Dict, List, Optional, Type
-
-from loguru import logger
-
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.custom_lists import embedding_type_to_cls_dict
-from langflow.interface.utils import build_template_from_class
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode
-
-
-class EmbeddingCreator(LangChainTypeCreator):
- type_name: str = "embeddings"
-
- @property
- def type_to_loader_dict(self) -> Dict:
- return embedding_type_to_cls_dict
-
- @property
- def frontend_node_class(self) -> Type[FrontendNode]:
- return EmbeddingFrontendNode
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of an embedding."""
- try:
- return build_template_from_class(name, embedding_type_to_cls_dict)
- except ValueError as exc:
- raise ValueError(f"Embedding {name} not found") from exc
-
- except AttributeError as exc:
- logger.error(f"Embedding {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- settings_service = get_settings_service()
- return [
- embedding.__name__
- for embedding in self.type_to_loader_dict.values()
- if embedding.__name__ in settings_service.settings.EMBEDDINGS or settings_service.settings.DEV
- ]
-
-
-embedding_creator = EmbeddingCreator()
diff --git a/src/backend/base/langflow/interface/importing/utils.py b/src/backend/base/langflow/interface/importing/utils.py
index 1b921d87b..963a51ccb 100644
--- a/src/backend/base/langflow/interface/importing/utils.py
+++ b/src/backend/base/langflow/interface/importing/utils.py
@@ -1,16 +1,7 @@
# This module is used to import any langchain class by name.
import importlib
-from typing import Any, Type
-
-from langchain.agents import Agent
-from langchain.base_language import BaseLanguageModel
-from langchain.chains.base import Chain
-from langchain.prompts import PromptTemplate
-from langchain.tools import BaseTool
-from langchain_core.language_models.chat_models import BaseChatModel
-
-from langflow.interface.wrappers.base import wrapper_creator
+from typing import Any
def import_module(module_path: str) -> Any:
@@ -27,134 +18,8 @@ def import_module(module_path: str) -> Any:
return getattr(module, object_name)
-def import_by_type(_type: str, name: str) -> Any:
- """Import class by type and name"""
- if _type is None:
- raise ValueError(f"Type cannot be None. Check if {name} is in the config file.")
- func_dict = {
- "agents": import_agent,
- "prompts": import_prompt,
- "models": {"llm": import_llm, "chat": import_chat_llm},
- "tools": import_tool,
- "chains": import_chain,
- "toolkits": import_toolkit,
- "wrappers": import_wrapper,
- "memory": import_memory,
- "embeddings": import_embedding,
- "vectorstores": import_vectorstore,
- "documentloaders": import_documentloader,
- "textsplitters": import_textsplitter,
- "utilities": import_utility,
- "retrievers": import_retriever,
- }
- if _type == "models":
- key = "chat" if "chat" in name.lower() else "llm"
- loaded_func = func_dict[_type][key] # type: ignore
- else:
- loaded_func = func_dict[_type]
-
- return loaded_func(name)
-
-
-def import_chat_llm(llm: str) -> BaseChatModel:
- """Import chat llm from llm name"""
- return import_class(f"langchain_community.chat_models.{llm}")
-
-
-def import_retriever(retriever: str) -> Any:
- """Import retriever from retriever name"""
- return import_module(f"from langchain.retrievers import {retriever}")
-
-
-def import_memory(memory: str) -> Any:
- """Import memory from memory name"""
- return import_module(f"from langchain.memory import {memory}")
-
-
def import_class(class_path: str) -> Any:
"""Import class from class path"""
module_path, class_name = class_path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, class_name)
-
-
-def import_prompt(prompt: str) -> Type[PromptTemplate]:
- """Import prompt from prompt name"""
- from langflow.interface.prompts.custom import CUSTOM_PROMPTS
-
- if prompt == "ZeroShotPrompt":
- return import_class("langchain.prompts.PromptTemplate")
- elif prompt in CUSTOM_PROMPTS:
- return CUSTOM_PROMPTS[prompt]
- return import_class(f"langchain.prompts.{prompt}")
-
-
-def import_wrapper(wrapper: str) -> Any:
- """Import wrapper from wrapper name"""
- if isinstance(wrapper_creator.type_dict, dict) and wrapper in wrapper_creator.type_dict:
- return wrapper_creator.type_dict.get(wrapper)
-
-
-def import_toolkit(toolkit: str) -> Any:
- """Import toolkit from toolkit name"""
- return import_module(f"from langchain.agents.agent_toolkits import {toolkit}")
-
-
-def import_agent(agent: str) -> Agent:
- """Import agent from agent name"""
- # check for custom agent
-
- return import_class(f"langchain.agents.{agent}")
-
-
-def import_llm(llm: str) -> BaseLanguageModel:
- """Import llm from llm name"""
- return import_class(f"langchain.llms.{llm}")
-
-
-def import_tool(tool: str) -> BaseTool:
- """Import tool from tool name"""
- from langflow.interface.tools.base import tool_creator
-
- if tool in tool_creator.type_to_loader_dict:
- return tool_creator.type_to_loader_dict[tool]["fcn"]
-
- return import_class(f"langchain.tools.{tool}")
-
-
-def import_chain(chain: str) -> Type[Chain]:
- """Import chain from chain name"""
- from langflow.interface.chains.custom import CUSTOM_CHAINS
-
- if chain in CUSTOM_CHAINS:
- return CUSTOM_CHAINS[chain]
- if chain == "SQLDatabaseChain":
- return import_class("langchain_experimental.sql.SQLDatabaseChain")
- return import_class(f"langchain.chains.{chain}")
-
-
-def import_embedding(embedding: str) -> Any:
- """Import embedding from embedding name"""
- return import_class(f"langchain_community.embeddings.{embedding}")
-
-
-def import_vectorstore(vectorstore: str) -> Any:
- """Import vectorstore from vectorstore name"""
- return import_class(f"langchain_community.vectorstores.{vectorstore}")
-
-
-def import_documentloader(documentloader: str) -> Any:
- """Import documentloader from documentloader name"""
- return import_class(f"langchain_community.document_loaders.{documentloader}")
-
-
-def import_textsplitter(textsplitter: str) -> Any:
- """Import textsplitter from textsplitter name"""
- return import_class(f"langchain.text_splitter.{textsplitter}")
-
-
-def import_utility(utility: str) -> Any:
- """Import utility from utility name"""
- if utility == "SQLDatabase":
- return import_class(f"langchain_community.sql_database.{utility}")
- return import_class(f"langchain_community.utilities.{utility}")
diff --git a/src/backend/base/langflow/interface/initialize/loading.py b/src/backend/base/langflow/interface/initialize/loading.py
index 6001258bc..03de827b3 100644
--- a/src/backend/base/langflow/interface/initialize/loading.py
+++ b/src/backend/base/langflow/interface/initialize/loading.py
@@ -1,32 +1,13 @@
import inspect
import json
import os
-from typing import TYPE_CHECKING, Any, Callable, Dict, Sequence, Type
+from typing import TYPE_CHECKING, Any, Type
import orjson
-from langchain.agents import agent as agent_module
-from langchain.agents.agent import AgentExecutor
-from langchain.agents.agent_toolkits.base import BaseToolkit
-from langchain.agents.tools import BaseTool
-from langchain.chains.base import Chain
-from langchain.document_loaders.base import BaseLoader
-from langchain_community.vectorstores import VectorStore
-from langchain_core.documents import Document
from loguru import logger
-from pydantic import ValidationError
-from langflow.interface.custom.eval import eval_custom_component_code
-from langflow.interface.importing.utils import import_by_type
-from langflow.interface.initialize.llm import initialize_vertexai
-from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type, handle_partial_variables
-from langflow.interface.initialize.vector_store import vecstore_initializer
-from langflow.interface.retrievers.base import retriever_creator
-from langflow.interface.toolkits.base import toolkits_creator
-from langflow.interface.utils import load_file_into_dict
-from langflow.interface.wrappers.base import wrapper_creator
+from langflow.custom.eval import eval_custom_component_code
from langflow.schema.schema import Record
-from langflow.utils import validate
-from langflow.utils.util import unescape_string
if TYPE_CHECKING:
from langflow.custom import CustomComponent
@@ -39,36 +20,19 @@ async def instantiate_class(
user_id=None,
) -> Any:
"""Instantiate class from module type and key, and params"""
- from langflow.interface.custom_lists import CUSTOM_NODES
vertex_type = vertex.vertex_type
base_type = vertex.base_type
params = vertex.params
params = convert_params_to_sets(params)
params = convert_kwargs(params)
-
- if vertex_type in CUSTOM_NODES:
- if custom_node := CUSTOM_NODES.get(vertex_type):
- if hasattr(custom_node, "initialize"):
- return custom_node.initialize(**params)
- if callable(custom_node):
- return custom_node(**params)
- raise ValueError(f"Custom node {vertex_type} is not callable")
logger.debug(f"Instantiating {vertex_type} of type {base_type}")
if not base_type:
raise ValueError("No base type provided for vertex")
if base_type == "custom_components":
return await instantiate_custom_component(params, user_id, vertex, fallback_to_env_vars=fallback_to_env_vars)
- class_object = import_by_type(_type=base_type, name=vertex_type)
- return await instantiate_based_on_type(
- class_object=class_object,
- base_type=base_type,
- node_type=vertex_type,
- params=params,
- user_id=user_id,
- vertex=vertex,
- fallback_to_env_vars=fallback_to_env_vars,
- )
+ else:
+ raise ValueError(f"Base type {base_type} not found.")
def convert_params_to_sets(params):
@@ -95,45 +59,6 @@ def convert_kwargs(params):
return params
-async def instantiate_based_on_type(class_object, base_type, node_type, params, user_id, vertex, fallback_to_env_vars):
- if base_type == "agents":
- return instantiate_agent(node_type, class_object, params)
- elif base_type == "prompts":
- return instantiate_prompt(node_type, class_object, params)
- elif base_type == "tools":
- tool = instantiate_tool(node_type, class_object, params)
- if hasattr(tool, "name") and isinstance(tool, BaseTool):
- # tool name shouldn't contain spaces
- tool.name = tool.name.replace(" ", "_")
- return tool
- elif base_type == "toolkits":
- return instantiate_toolkit(node_type, class_object, params)
- elif base_type == "embeddings":
- return instantiate_embedding(node_type, class_object, params)
- elif base_type == "vectorstores":
- return instantiate_vectorstore(class_object, params)
- elif base_type == "documentloaders":
- return instantiate_documentloader(node_type, class_object, params)
- elif base_type == "textsplitters":
- return instantiate_textsplitter(class_object, params)
- elif base_type == "utilities":
- return instantiate_utility(node_type, class_object, params)
- elif base_type == "chains":
- return instantiate_chains(node_type, class_object, params)
- elif base_type == "models":
- return instantiate_llm(node_type, class_object, params)
- elif base_type == "retrievers":
- return instantiate_retriever(node_type, class_object, params)
- elif base_type == "memory":
- return instantiate_memory(node_type, class_object, params)
- elif base_type == "custom_components":
- return await instantiate_custom_component(params, user_id, vertex, fallback_to_env_vars=fallback_to_env_vars)
- elif base_type == "wrappers":
- return instantiate_wrapper(node_type, class_object, params)
- else:
- return class_object(**params)
-
-
def update_params_with_load_from_db_fields(
custom_component: "CustomComponent", params, load_from_db_fields, fallback_to_env_vars=False
):
@@ -200,351 +125,3 @@ async def instantiate_custom_component(params, user_id, vertex, fallback_to_env_
if not isinstance(custom_repr, str):
custom_repr = str(custom_repr)
return custom_component, build_result, {"repr": custom_repr}
-
-
-def instantiate_wrapper(node_type, class_object, params):
- if node_type in wrapper_creator.from_method_nodes:
- method = wrapper_creator.from_method_nodes[node_type]
- if class_method := getattr(class_object, method, None):
- return class_method(**params)
- raise ValueError(f"Method {method} not found in {class_object}")
- return class_object(**params)
-
-
-def instantiate_llm(node_type, class_object, params: Dict):
- # This is a workaround so JinaChat works until streaming is implemented
- # if "openai_api_base" in params and "jina" in params["openai_api_base"]:
- # False if condition is True
- if "VertexAI" in node_type:
- return initialize_vertexai(class_object=class_object, params=params)
- # max_tokens sometimes is a string and should be an int
- if "max_tokens" in params:
- if isinstance(params["max_tokens"], str) and params["max_tokens"].isdigit():
- params["max_tokens"] = int(params["max_tokens"])
- elif not isinstance(params.get("max_tokens"), int):
- params.pop("max_tokens", None)
- return class_object(**params)
-
-
-def instantiate_memory(node_type, class_object, params):
- # process input_key and output_key to remove them if
- # they are empty strings
- if node_type == "ConversationEntityMemory":
- params.pop("memory_key", None)
-
- for key in ["input_key", "output_key"]:
- if key in params and (params[key] == "" or not params[key]):
- params.pop(key)
-
- try:
- if "retriever" in params and hasattr(params["retriever"], "as_retriever"):
- params["retriever"] = params["retriever"].as_retriever()
- return class_object(**params)
- # I want to catch a specific attribute error that happens
- # when the object does not have a cursor attribute
- except Exception as exc:
- if "object has no attribute 'cursor'" in str(exc) or 'object has no field "conn"' in str(exc):
- raise AttributeError(
- (
- "Failed to build connection to database."
- f" Please check your connection string and try again. Error: {exc}"
- )
- ) from exc
- raise exc
-
-
-def instantiate_retriever(node_type, class_object, params):
- if "retriever" in params and hasattr(params["retriever"], "as_retriever"):
- params["retriever"] = params["retriever"].as_retriever()
- if node_type in retriever_creator.from_method_nodes:
- method = retriever_creator.from_method_nodes[node_type]
- if class_method := getattr(class_object, method, None):
- return class_method(**params)
- raise ValueError(f"Method {method} not found in {class_object}")
- return class_object(**params)
-
-
-def instantiate_chains(node_type, class_object: Type[Chain], params: Dict):
- from langflow.interface.chains.base import chain_creator
-
- if "retriever" in params and hasattr(params["retriever"], "as_retriever"):
- params["retriever"] = params["retriever"].as_retriever()
- if node_type in chain_creator.from_method_nodes:
- method = chain_creator.from_method_nodes[node_type]
- if class_method := getattr(class_object, method, None):
- return class_method(**params)
- raise ValueError(f"Method {method} not found in {class_object}")
-
- return class_object(**params)
-
-
-def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params: Dict):
- from langflow.interface.agents.base import agent_creator
-
- if node_type in agent_creator.from_method_nodes:
- method = agent_creator.from_method_nodes[node_type]
- if class_method := getattr(class_object, method, None):
- agent = class_method(**params)
- tools = params.get("tools", [])
- return AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, handle_parsing_errors=True)
- return load_agent_executor(class_object, params)
-
-
-def instantiate_prompt(node_type, class_object, params: Dict):
- params, prompt = handle_node_type(node_type, class_object, params)
- format_kwargs = handle_format_kwargs(prompt, params)
- # Now we'll use partial_format to format the prompt
- if format_kwargs:
- prompt = handle_partial_variables(prompt, format_kwargs)
- return prompt, format_kwargs
-
-
-def instantiate_tool(node_type, class_object: Type[BaseTool], params: Dict):
- if node_type == "JsonSpec":
- if file_dict := load_file_into_dict(params.pop("path")):
- params["dict_"] = file_dict
- else:
- raise ValueError("Invalid file")
- return class_object(**params)
- elif node_type == "PythonFunctionTool":
- from langflow.interface.custom.utils import get_function
-
- params["func"] = get_function(params.get("code"))
- return class_object(**params)
- elif node_type == "PythonFunction":
- function_string = params["code"]
- if isinstance(function_string, str):
- return validate.eval_function(function_string)
- raise ValueError("Function should be a string")
- elif node_type.lower() == "tool":
- return class_object(**params)
- return class_object(**params)
-
-
-def instantiate_toolkit(node_type, class_object: Type[BaseToolkit], params: Dict):
- loaded_toolkit = class_object(**params)
- # Commenting this out for now to use toolkits as normal tools
- # if toolkits_creator.has_create_function(node_type):
- # return load_toolkits_executor(node_type, loaded_toolkit, params)
- if isinstance(loaded_toolkit, BaseToolkit):
- return loaded_toolkit.get_tools()
- return loaded_toolkit
-
-
-def instantiate_embedding(node_type, class_object, params: Dict):
- params.pop("model", None)
- params.pop("headers", None)
-
- if "VertexAI" in node_type:
- return initialize_vertexai(class_object=class_object, params=params)
-
- if "OpenAIEmbedding" in node_type:
- params["disallowed_special"] = ()
-
- try:
- return class_object(**params)
- except ValidationError:
- params = {key: value for key, value in params.items() if key in class_object.model_fields}
- return class_object(**params)
-
-
-def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict):
- search_kwargs = params.pop("search_kwargs", {})
- if search_kwargs == {"yourkey": "value"}:
- search_kwargs = {}
- # clean up docs or texts to have only documents
- if "texts" in params:
- params["documents"] = params.pop("texts")
- if "documents" in params:
- params["documents"] = [doc for doc in params["documents"] if isinstance(doc, Document)]
- if initializer := vecstore_initializer.get(class_object.__name__):
- vecstore = initializer(class_object, params)
- else:
- if "texts" in params:
- params["documents"] = params.pop("texts")
- vecstore = class_object.from_documents(**params)
-
- # ! This might not work. Need to test
- if search_kwargs and hasattr(vecstore, "as_retriever"):
- vecstore = vecstore.as_retriever(search_kwargs=search_kwargs)
-
- return vecstore
-
-
-def instantiate_documentloader(node_type: str, class_object: Type[BaseLoader], params: Dict):
- if "file_filter" in params:
- # file_filter will be a string but we need a function
- # that will be used to filter the files using file_filter
- # like lambda x: x.endswith(".txt") but as we don't know
- # anything besides the string, we will simply check if the string is
- # in x and if it is, we will return True
- file_filter = params.pop("file_filter")
- extensions = file_filter.split(",")
- params["file_filter"] = lambda x: any(extension.strip() in x for extension in extensions)
- metadata = params.pop("metadata", None)
- if metadata and isinstance(metadata, str):
- try:
- metadata = orjson.loads(metadata)
- except json.JSONDecodeError as exc:
- raise ValueError("The metadata you provided is not a valid JSON string.") from exc
-
- if node_type == "WebBaseLoader":
- if web_path := params.pop("web_path", None):
- params["web_paths"] = [web_path]
-
- docs = class_object(**params).load()
- # Now if metadata is an empty dict, we will not add it to the documents
- if metadata:
- for doc in docs:
- # If the document already has metadata, we will not overwrite it
- if not doc.metadata:
- doc.metadata = metadata
- else:
- doc.metadata.update(metadata)
-
- return docs
-
-
-def instantiate_textsplitter(
- class_object,
- params: Dict,
-):
- try:
- documents = params.pop("documents")
- if not isinstance(documents, list):
- documents = [documents]
- except KeyError as exc:
- raise ValueError(
- "The source you provided did not load correctly or was empty."
- "Try changing the chunk_size of the Text Splitter."
- ) from exc
-
- if ("separator_type" in params and params["separator_type"] == "Text") or "separator_type" not in params:
- params.pop("separator_type", None)
- # separators might come in as an escaped string like \\n
- # so we need to convert it to a string
- if "separators" in params:
- if isinstance(params["separators"], str):
- params["separators"] = unescape_string(params["separators"])
- elif isinstance(params["separators"], list):
- params["separators"] = [unescape_string(separator) for separator in params["separators"]]
- text_splitter = class_object(**params)
- else:
- from langchain.text_splitter import Language
-
- language = params.pop("separator_type", None)
- params["language"] = Language(language)
- params.pop("separators", None)
-
- text_splitter = class_object.from_language(**params)
- return text_splitter.split_documents(documents)
-
-
-def instantiate_utility(node_type, class_object, params: Dict):
- if node_type == "SQLDatabase":
- return class_object.from_uri(params.pop("uri"))
- return class_object(**params)
-
-
-def replace_zero_shot_prompt_with_prompt_template(nodes):
- """Replace ZeroShotPrompt with PromptTemplate"""
- for node in nodes:
- if node["data"]["type"] == "ZeroShotPrompt":
- # Build Prompt Template
- tools = [
- tool
- for tool in nodes
- if tool["type"] != "chatOutputNode" and "Tool" in tool["data"]["node"]["base_classes"]
- ]
- node["data"] = build_prompt_template(prompt=node["data"], tools=tools)
- break
- return nodes
-
-
-def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs):
- """Load agent executor from agent class, tools and chain"""
- allowed_tools: Sequence[BaseTool] = params.get("allowed_tools", [])
- llm_chain = params["llm_chain"]
- # agent has hidden args for memory. might need to be support
- # memory = params["memory"]
- # if allowed_tools is not a list or set, make it a list
- if not isinstance(allowed_tools, (list, set)) and isinstance(allowed_tools, BaseTool):
- allowed_tools = [allowed_tools]
- tool_names = [tool.name for tool in allowed_tools]
- # Agent class requires an output_parser but Agent classes
- # have a default output_parser.
- agent = agent_class(allowed_tools=tool_names, llm_chain=llm_chain) # type: ignore
- return AgentExecutor.from_agent_and_tools(
- agent=agent,
- tools=allowed_tools,
- handle_parsing_errors=True,
- # memory=memory,
- **kwargs,
- )
-
-
-def load_toolkits_executor(node_type: str, toolkit: BaseToolkit, params: dict):
- create_function: Callable = toolkits_creator.get_create_function(node_type)
- if llm := params.get("llm"):
- return create_function(llm=llm, toolkit=toolkit)
-
-
-def build_prompt_template(prompt, tools):
- """Build PromptTemplate from ZeroShotPrompt"""
- prefix = prompt["node"]["template"]["prefix"]["value"]
- suffix = prompt["node"]["template"]["suffix"]["value"]
- format_instructions = prompt["node"]["template"]["format_instructions"]["value"]
-
- tool_strings = "\n".join(
- [f"{tool['data']['node']['name']}: {tool['data']['node']['description']}" for tool in tools]
- )
- tool_names = ", ".join([tool["data"]["node"]["name"] for tool in tools])
- format_instructions = format_instructions.format(tool_names=tool_names)
- value = "\n\n".join([prefix, tool_strings, format_instructions, suffix])
-
- prompt["type"] = "PromptTemplate"
-
- prompt["node"] = {
- "template": {
- "_type": "prompt",
- "input_variables": {
- "type": "str",
- "required": True,
- "placeholder": "",
- "list": True,
- "show": False,
- "multiline": False,
- },
- "template": {
- "type": "str",
- "required": True,
- "placeholder": "",
- "list": False,
- "show": True,
- "multiline": True,
- "value": value,
- },
- "template_format": {
- "type": "str",
- "required": False,
- "placeholder": "",
- "list": False,
- "show": False,
- "multline": False,
- "value": "f-string",
- },
- "validate_template": {
- "type": "bool",
- "required": False,
- "placeholder": "",
- "list": False,
- "show": False,
- "multline": False,
- "value": True,
- },
- },
- "description": "Schema to represent a prompt for an LLM.",
- "base_classes": ["BasePromptTemplate"],
- }
-
- return prompt
diff --git a/src/backend/base/langflow/interface/initialize/utils.py b/src/backend/base/langflow/interface/initialize/utils.py
index 0ef76836b..c09525a6c 100644
--- a/src/backend/base/langflow/interface/initialize/utils.py
+++ b/src/backend/base/langflow/interface/initialize/utils.py
@@ -4,9 +4,10 @@ from typing import Any, Dict, List
import orjson
from langchain.agents import ZeroShotAgent
-from langchain.schema import BaseOutputParser, Document
from langflow.services.database.models.base import orjson_dumps
+from langchain_core.documents import Document
+from langchain_core.output_parsers import BaseOutputParser
def handle_node_type(node_type, class_object, params: Dict):
diff --git a/src/backend/base/langflow/interface/initialize/vector_store.py b/src/backend/base/langflow/interface/initialize/vector_store.py
index 8e596298c..8b9034e65 100644
--- a/src/backend/base/langflow/interface/initialize/vector_store.py
+++ b/src/backend/base/langflow/interface/initialize/vector_store.py
@@ -6,12 +6,12 @@ from langchain_community.vectorstores import (
FAISS,
Chroma,
MongoDBAtlasVectorSearch,
- Pinecone,
Qdrant,
SupabaseVectorStore,
Weaviate,
)
from langchain_core.documents import Document
+from langchain_pinecone import Pinecone
def docs_in_params(params: dict) -> bool:
diff --git a/src/backend/base/langflow/interface/listing.py b/src/backend/base/langflow/interface/listing.py
index a831f1098..a51e676db 100644
--- a/src/backend/base/langflow/interface/listing.py
+++ b/src/backend/base/langflow/interface/listing.py
@@ -21,7 +21,7 @@ class AllTypesDict(LazyLoadDictBase):
from langflow.interface.types import get_all_types_dict
settings_service = get_settings_service()
- return get_all_types_dict(settings_service.settings.COMPONENTS_PATH)
+ return get_all_types_dict(settings_service.settings.components_path)
lazy_load_dict = AllTypesDict()
diff --git a/src/backend/base/langflow/interface/llms/__init__.py b/src/backend/base/langflow/interface/llms/__init__.py
deleted file mode 100644
index c5d7186fb..000000000
--- a/src/backend/base/langflow/interface/llms/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from langflow.interface.llms.base import LLMCreator
-
-__all__ = ["LLMCreator"]
diff --git a/src/backend/base/langflow/interface/llms/base.py b/src/backend/base/langflow/interface/llms/base.py
deleted file mode 100644
index b7d91d674..000000000
--- a/src/backend/base/langflow/interface/llms/base.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from typing import Dict, List, Optional, Type
-
-from loguru import logger
-
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.custom_lists import llm_type_to_cls_dict
-from langflow.interface.utils import build_template_from_class
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.llms import LLMFrontendNode
-
-
-class LLMCreator(LangChainTypeCreator):
- type_name: str = "models"
-
- @property
- def frontend_node_class(self) -> Type[LLMFrontendNode]:
- return LLMFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- self.type_dict = llm_type_to_cls_dict
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of an llm."""
- try:
- return build_template_from_class(name, llm_type_to_cls_dict)
- except ValueError as exc:
- raise ValueError("LLM not found") from exc
-
- except AttributeError as exc:
- logger.error(f"LLM {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- settings_service = get_settings_service()
- return [
- llm.__name__
- for llm in self.type_to_loader_dict.values()
- if llm.__name__ in settings_service.settings.LLMS or settings_service.settings.DEV
- ]
-
-
-llm_creator = LLMCreator()
diff --git a/src/backend/base/langflow/interface/memories/__init__.py b/src/backend/base/langflow/interface/memories/__init__.py
deleted file mode 100644
index 845eb29fe..000000000
--- a/src/backend/base/langflow/interface/memories/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from langflow.interface.memories.base import MemoryCreator
-
-__all__ = ["MemoryCreator"]
diff --git a/src/backend/base/langflow/interface/memories/base.py b/src/backend/base/langflow/interface/memories/base.py
deleted file mode 100644
index ea0eabbf1..000000000
--- a/src/backend/base/langflow/interface/memories/base.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from typing import ClassVar, Dict, List, Optional, Type
-
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.custom_lists import memory_type_to_cls_dict
-from langflow.interface.utils import build_template_from_class
-from langflow.legacy_custom.customs import get_custom_nodes
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.frontend_node.memories import MemoryFrontendNode
-from langflow.utils.util import build_template_from_method
-from loguru import logger
-
-
-class MemoryCreator(LangChainTypeCreator):
- type_name: str = "memories"
-
- from_method_nodes: ClassVar[Dict] = {
- "ZepChatMessageHistory": "__init__",
- "SQLiteEntityStore": "__init__",
- }
-
- @property
- def frontend_node_class(self) -> Type[FrontendNode]:
- """The class type of the FrontendNode created in frontend_node."""
- return MemoryFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- self.type_dict = memory_type_to_cls_dict
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of a memory."""
- try:
- if name in get_custom_nodes(self.type_name).keys():
- return get_custom_nodes(self.type_name)[name]
- elif name in self.from_method_nodes:
- return build_template_from_method(
- name,
- type_to_cls_dict=memory_type_to_cls_dict,
- method_name=self.from_method_nodes[name],
- )
- return build_template_from_class(name, memory_type_to_cls_dict)
- except ValueError as exc:
- raise ValueError("Memory not found") from exc
- except AttributeError as exc:
- logger.error(f"Memory {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- settings_service = get_settings_service()
- return [
- memory.__name__
- for memory in self.type_to_loader_dict.values()
- if memory.__name__ in settings_service.settings.MEMORIES or settings_service.settings.DEV
- ]
-
-
-memory_creator = MemoryCreator()
diff --git a/src/backend/base/langflow/interface/prompts/__init__.py b/src/backend/base/langflow/interface/prompts/__init__.py
deleted file mode 100644
index 2a81e8bf0..000000000
--- a/src/backend/base/langflow/interface/prompts/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from langflow.interface.prompts.base import PromptCreator
-
-__all__ = ["PromptCreator"]
diff --git a/src/backend/base/langflow/interface/prompts/base.py b/src/backend/base/langflow/interface/prompts/base.py
deleted file mode 100644
index b9662e0cd..000000000
--- a/src/backend/base/langflow/interface/prompts/base.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from typing import Dict, List, Optional, Type
-
-from langchain import prompts
-from loguru import logger
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.importing.utils import import_class
-from langflow.interface.utils import build_template_from_class
-from langflow.legacy_custom.customs import get_custom_nodes
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.prompts import PromptFrontendNode
-
-
-class PromptCreator(LangChainTypeCreator):
- type_name: str = "prompts"
-
- @property
- def frontend_node_class(self) -> Type[PromptFrontendNode]:
- return PromptFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- settings_service = get_settings_service()
- if self.type_dict is None:
- self.type_dict = {
- prompt_name: import_class(f"langchain.prompts.{prompt_name}")
- # if prompt_name is not lower case it is a class
- for prompt_name in prompts.__all__
- }
- # Merge CUSTOM_PROMPTS into self.type_dict
- from langflow.interface.prompts.custom import CUSTOM_PROMPTS
-
- self.type_dict.update(CUSTOM_PROMPTS)
- # Now filter according to settings.prompts
- self.type_dict = {
- name: prompt
- for name, prompt in self.type_dict.items()
- if name in settings_service.settings.PROMPTS or settings_service.settings.DEV
- }
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- try:
- if name in get_custom_nodes(self.type_name).keys():
- return get_custom_nodes(self.type_name)[name]
- return build_template_from_class(name, self.type_to_loader_dict)
- except ValueError as exc:
- # raise ValueError("Prompt not found") from exc
- logger.error(f"Prompt {name} not found: {exc}")
- except AttributeError as exc:
- logger.error(f"Prompt {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- custom_prompts = get_custom_nodes("prompts")
- # library_prompts = [
- # prompt.__annotations__["return"].__name__
- # for prompt in self.type_to_loader_dict.values()
- # if prompt.__annotations__["return"].__name__ in settings.prompts
- # or settings.dev
- # ]
- return list(self.type_to_loader_dict.keys()) + list(custom_prompts.keys())
-
-
-prompt_creator = PromptCreator()
diff --git a/src/backend/base/langflow/interface/prompts/custom.py b/src/backend/base/langflow/interface/prompts/custom.py
deleted file mode 100644
index 202fbe409..000000000
--- a/src/backend/base/langflow/interface/prompts/custom.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from typing import Dict, List, Optional, Type
-
-from langchain.prompts import PromptTemplate
-from pydantic.v1 import root_validator
-
-from langflow.interface.utils import extract_input_variables_from_prompt
-
-# Steps to create a BaseCustomPrompt:
-# 1. Create a prompt template that endes with:
-# Current conversation:
-# {history}
-# Human: {input}
-# {ai_prefix}:
-# 2. Create a class that inherits from BaseCustomPrompt
-# 3. Add the following class attributes:
-# template: str = ""
-# description: Optional[str]
-# ai_prefix: Optional[str] = "{ai_prefix}"
-# 3.1. The ai_prefix should be a value in input_variables
-# SeriesCharacterPrompt is a working example
-# If used in a LLMChain, with a Memory module, it will work as expected
-# We should consider creating ConversationalChains that expose custom parameters
-# That way it will be easier to create custom prompts
-
-
-class BaseCustomPrompt(PromptTemplate):
- template: str = ""
- description: Optional[str]
- ai_prefix: Optional[str]
-
- @root_validator(pre=False)
- def build_template(cls, values):
- format_dict = {}
- ai_prefix_format_dict = {}
- for key in values.get("input_variables", []):
- new_value = values.get(key, f"{{{key}}}")
- format_dict[key] = new_value
- if key in values["ai_prefix"]:
- ai_prefix_format_dict[key] = new_value
-
- values["ai_prefix"] = values["ai_prefix"].format(**ai_prefix_format_dict)
- values["template"] = values["template"].format(**format_dict)
-
- values["template"] = values["template"]
- values["input_variables"] = extract_input_variables_from_prompt(values["template"])
- return values
-
-
-class SeriesCharacterPrompt(BaseCustomPrompt):
- # Add a very descriptive description for the prompt generator
- description: Optional[str] = "A prompt that asks the AI to act like a character from a series."
- character: str
- series: str
- template: str = """I want you to act like {character} from {series}.
-I want you to respond and answer like {character}. do not write any explanations. only answer like {character}.
-You must know all of the knowledge of {character}.
-
-Current conversation:
-{history}
-Human: {input}
-{character}:"""
-
- ai_prefix: str = "{character}"
- input_variables: List[str] = ["character", "series"]
-
-
-CUSTOM_PROMPTS: Dict[str, Type[BaseCustomPrompt]] = {"SeriesCharacterPrompt": SeriesCharacterPrompt}
diff --git a/src/backend/base/langflow/interface/retrievers/__init__.py b/src/backend/base/langflow/interface/retrievers/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/backend/base/langflow/interface/retrievers/base.py b/src/backend/base/langflow/interface/retrievers/base.py
deleted file mode 100644
index 6eefe18db..000000000
--- a/src/backend/base/langflow/interface/retrievers/base.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from typing import Any, ClassVar, Dict, List, Optional, Type
-
-from langchain_community import retrievers
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.importing.utils import import_class
-from langflow.interface.utils import build_template_from_class
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.retrievers import RetrieverFrontendNode
-from langflow.utils.util import build_template_from_method
-from loguru import logger
-
-
-class RetrieverCreator(LangChainTypeCreator):
- type_name: str = "retrievers"
-
- from_method_nodes: ClassVar[Dict] = {
- "MultiQueryRetriever": "from_llm",
- "ZepRetriever": "__init__",
- }
-
- @property
- def frontend_node_class(self) -> Type[RetrieverFrontendNode]:
- return RetrieverFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- self.type_dict: dict[str, Any] = {
- retriever_name: import_class(f"langchain_community.retrievers.{retriever_name}")
- for retriever_name in retrievers.__all__
- }
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of an embedding."""
- try:
- if name in self.from_method_nodes:
- return build_template_from_method(
- name,
- type_to_cls_dict=self.type_to_loader_dict,
- method_name=self.from_method_nodes[name],
- )
- else:
- return build_template_from_class(name, type_to_cls_dict=self.type_to_loader_dict)
- except ValueError as exc:
- raise ValueError(f"Retriever {name} not found") from exc
- except AttributeError as exc:
- logger.error(f"Retriever {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- settings_service = get_settings_service()
- return [
- retriever
- for retriever in self.type_to_loader_dict.keys()
- if retriever in settings_service.settings.RETRIEVERS or settings_service.settings.DEV
- ]
-
-
-retriever_creator = RetrieverCreator()
diff --git a/src/backend/base/langflow/interface/text_splitters/__init__.py b/src/backend/base/langflow/interface/text_splitters/__init__.py
deleted file mode 100644
index 4bb9dd1b0..000000000
--- a/src/backend/base/langflow/interface/text_splitters/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from langflow.interface.text_splitters.base import TextSplitterCreator
-
-__all__ = ["TextSplitterCreator"]
diff --git a/src/backend/base/langflow/interface/text_splitters/base.py b/src/backend/base/langflow/interface/text_splitters/base.py
deleted file mode 100644
index 69d9799b3..000000000
--- a/src/backend/base/langflow/interface/text_splitters/base.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from typing import Dict, List, Optional, Type
-
-from loguru import logger
-
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.custom_lists import textsplitter_type_to_cls_dict
-from langflow.interface.utils import build_template_from_class
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode
-
-
-class TextSplitterCreator(LangChainTypeCreator):
- type_name: str = "textsplitters"
-
- @property
- def frontend_node_class(self) -> Type[TextSplittersFrontendNode]:
- return TextSplittersFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- return textsplitter_type_to_cls_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of a text splitter."""
- try:
- return build_template_from_class(name, textsplitter_type_to_cls_dict)
- except ValueError as exc:
- raise ValueError(f"Text Splitter {name} not found") from exc
- except AttributeError as exc:
- logger.error(f"Text Splitter {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- settings_service = get_settings_service()
- return [
- textsplitter.__name__
- for textsplitter in self.type_to_loader_dict.values()
- if textsplitter.__name__ in settings_service.settings.TEXTSPLITTERS or settings_service.settings.DEV
- ]
-
-
-textsplitter_creator = TextSplitterCreator()
diff --git a/src/backend/base/langflow/interface/toolkits/__init__.py b/src/backend/base/langflow/interface/toolkits/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/backend/base/langflow/interface/toolkits/base.py b/src/backend/base/langflow/interface/toolkits/base.py
deleted file mode 100644
index eca7ae3b7..000000000
--- a/src/backend/base/langflow/interface/toolkits/base.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import warnings
-from typing import Callable, Dict, List, Optional
-
-from langchain.agents import agent_toolkits
-from loguru import logger
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.importing.utils import import_class, import_module
-from langflow.interface.utils import build_template_from_class
-from langflow.services.deps import get_settings_service
-
-
-class ToolkitCreator(LangChainTypeCreator):
- type_name: str = "toolkits"
- all_types: List[str] = agent_toolkits.__all__
- create_functions: Dict = {
- "JsonToolkit": [],
- "SQLDatabaseToolkit": [],
- "OpenAPIToolkit": ["create_openapi_agent"],
- "VectorStoreToolkit": [
- "create_vectorstore_agent",
- "create_vectorstore_router_agent",
- "VectorStoreInfo",
- ],
- "ZapierToolkit": [],
- "PandasToolkit": ["create_pandas_dataframe_agent"],
- "CSVToolkit": ["create_csv_agent"],
- }
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- with warnings.catch_warnings():
- warnings.simplefilter("ignore")
- settings_service = get_settings_service()
- self.type_dict = {
- toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
- # if toolkit_name is not lower case it is a class
- for toolkit_name in agent_toolkits.__all__
- if not toolkit_name.islower() and toolkit_name in settings_service.settings.TOOLKITS
- }
-
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- try:
- template = build_template_from_class(name, self.type_to_loader_dict)
- # add Tool to base_classes
- if "toolkit" in name.lower() and template:
- template["base_classes"].append("Tool")
- return template
- except ValueError as exc:
- raise ValueError("Toolkit not found") from exc
- except AttributeError as exc:
- logger.error(f"Toolkit {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- return list(self.type_to_loader_dict.keys())
-
- def get_create_function(self, name: str) -> Callable:
- if loader_name := self.create_functions.get(name):
- return import_module(f"from langchain.agents.agent_toolkits import {loader_name[0]}")
- else:
- raise ValueError("Toolkit not found")
-
- def has_create_function(self, name: str) -> bool:
- # check if the function list is not empty
- return bool(self.create_functions.get(name, None))
-
-
-toolkits_creator = ToolkitCreator()
diff --git a/src/backend/base/langflow/interface/toolkits/custom.py b/src/backend/base/langflow/interface/toolkits/custom.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/backend/base/langflow/interface/tools/__init__.py b/src/backend/base/langflow/interface/tools/__init__.py
deleted file mode 100644
index 148892e90..000000000
--- a/src/backend/base/langflow/interface/tools/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from langflow.interface.tools.base import ToolCreator
-
-__all__ = ["ToolCreator"]
diff --git a/src/backend/base/langflow/interface/tools/base.py b/src/backend/base/langflow/interface/tools/base.py
deleted file mode 100644
index f64192a3b..000000000
--- a/src/backend/base/langflow/interface/tools/base.py
+++ /dev/null
@@ -1,170 +0,0 @@
-from typing import Dict, List, Optional
-
-from langchain.agents.load_tools import _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS
-
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.tools.constants import ALL_TOOLS_NAMES, CUSTOM_TOOLS, FILE_TOOLS, OTHER_TOOLS
-from langflow.interface.tools.util import get_tool_params
-from langflow.legacy_custom import customs
-from langflow.services.deps import get_settings_service
-from langflow.template.field.base import TemplateField
-from langflow.template.template.base import Template
-from langflow.utils import util
-from langflow.utils.logger import logger
-from langflow.interface.utils import build_template_from_class
-
-TOOL_INPUTS = {
- "str": TemplateField(
- field_type="str",
- required=True,
- is_list=False,
- show=True,
- placeholder="",
- value="",
- ),
- "llm": TemplateField(field_type="BaseLanguageModel", required=True, is_list=False, show=True),
- "func": TemplateField(
- field_type="Callable",
- required=True,
- is_list=False,
- show=True,
- multiline=True,
- ),
- "code": TemplateField(
- field_type="str",
- required=True,
- is_list=False,
- show=True,
- value="",
- multiline=True,
- ),
- "path": TemplateField(
- field_type="file",
- required=True,
- is_list=False,
- show=True,
- value="",
- file_types=[".json", ".yaml", ".yml"],
- ),
-}
-
-
-class ToolCreator(LangChainTypeCreator):
- type_name: str = "tools"
- tools_dict: Optional[Dict] = None
-
- @property
- def type_to_loader_dict(self) -> Dict:
- settings_service = get_settings_service()
- if self.tools_dict is None:
- all_tools = {}
-
- for tool, tool_fcn in ALL_TOOLS_NAMES.items():
- try:
- tool_params = get_tool_params(tool_fcn)
- except Exception:
- logger.error(f"Error getting params for tool {tool}")
- continue
-
- tool_name = tool_params.get("name") or tool
-
- if tool_name in settings_service.settings.TOOLS or settings_service.settings.DEV:
- if tool_name == "JsonSpec":
- tool_params["path"] = tool_params.pop("dict_") # type: ignore
- all_tools[tool_name] = {
- "type": tool,
- "params": tool_params,
- "fcn": tool_fcn,
- }
-
- self.tools_dict = all_tools
-
- return self.tools_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of a tool."""
-
- base_classes = ["Tool", "BaseTool"]
- fields = []
- params = []
- tool_params = {}
-
- # Raise error if name is not in tools
- if name not in self.type_to_loader_dict.keys():
- raise ValueError("Tool not found")
-
- tool_type: str = self.type_to_loader_dict[name]["type"] # type: ignore
-
- # if tool_type in _BASE_TOOLS.keys():
- # params = []
- if tool_type in _LLM_TOOLS.keys():
- params = ["llm"]
- elif tool_type in _EXTRA_LLM_TOOLS.keys():
- extra_keys = _EXTRA_LLM_TOOLS[tool_type][1]
- params = ["llm"] + extra_keys
- elif tool_type in _EXTRA_OPTIONAL_TOOLS.keys():
- extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type][1]
- params = extra_keys
- # elif tool_type == "Tool":
- # params = ["name", "description", "func"]
- elif tool_type in CUSTOM_TOOLS:
- # Get custom tool params
- params = self.type_to_loader_dict[name]["params"] # type: ignore
- base_classes = ["Callable"]
- if node := customs.get_custom_nodes("tools").get(tool_type):
- return node
- elif tool_type in FILE_TOOLS:
- params = self.type_to_loader_dict[name]["params"] # type: ignore
- base_classes += [name]
- elif tool_type in OTHER_TOOLS:
- tool_dict = build_template_from_class(tool_type, OTHER_TOOLS)
- fields = tool_dict["template"]
-
- # _type is the only key in fields
- # return None
- if len(fields) == 1 and "_type" in fields:
- return None
-
- # Pop unnecessary fields and add name
- fields.pop("_type") # type: ignore
- fields.pop("return_direct", None) # type: ignore
- fields.pop("verbose", None) # type: ignore
-
- tool_params = {
- "name": fields.pop("name")["value"], # type: ignore
- "description": fields.pop("description")["value"], # type: ignore
- }
-
- fields = [
- TemplateField(name=name, field_type=field["type"], **field)
- for name, field in fields.items() # type: ignore
- ]
- base_classes += tool_dict["base_classes"]
-
- # Copy the field and add the name
- for param in params:
- field = TOOL_INPUTS.get(param, TOOL_INPUTS["str"]).copy()
- field.name = param
- field.advanced = False
- if param == "aiosession":
- field.show = False
- field.required = False
-
- fields.append(field)
-
- template = Template(fields=fields, type_name=tool_type)
-
- tool_params = {**tool_params, **self.type_to_loader_dict[name]["params"]}
- return {
- "template": util.format_dict(template.to_dict()),
- **tool_params,
- "base_classes": base_classes,
- }
-
- def to_list(self) -> List[str]:
- """List all load tools"""
-
- return list(self.type_to_loader_dict.keys())
-
-
-tool_creator = ToolCreator()
diff --git a/src/backend/base/langflow/interface/tools/constants.py b/src/backend/base/langflow/interface/tools/constants.py
deleted file mode 100644
index 27b42b327..000000000
--- a/src/backend/base/langflow/interface/tools/constants.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from langchain import tools
-from langchain.agents import Tool
-from langchain.agents.load_tools import _BASE_TOOLS, _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS
-from langchain_community.tools.json.tool import JsonSpec
-
-from langflow.interface.importing.utils import import_class
-from langflow.interface.tools.custom import PythonFunctionTool
-
-FILE_TOOLS = {"JsonSpec": JsonSpec}
-CUSTOM_TOOLS = {
- "Tool": Tool,
- "PythonFunctionTool": PythonFunctionTool,
-}
-
-OTHER_TOOLS = {tool: import_class(f"langchain_community.tools.{tool}") for tool in tools.__all__}
-
-ALL_TOOLS_NAMES = {
- **_BASE_TOOLS,
- **_LLM_TOOLS, # type: ignore
- **{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()}, # type: ignore
- **{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()},
- **CUSTOM_TOOLS,
- **FILE_TOOLS, # type: ignore
- **OTHER_TOOLS,
-}
diff --git a/src/backend/base/langflow/interface/tools/custom.py b/src/backend/base/langflow/interface/tools/custom.py
deleted file mode 100644
index 6ba8cac13..000000000
--- a/src/backend/base/langflow/interface/tools/custom.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from typing import Callable, Optional
-
-from langchain.agents.tools import Tool
-from pydantic.v1 import BaseModel, validator
-
-from langflow.interface.custom.utils import get_function
-from langflow.utils import validate
-
-
-class Function(BaseModel):
- code: str
- function: Optional[Callable] = None
- imports: Optional[str] = None
-
- # Eval code and store the function
- def __init__(self, **data):
- super().__init__(**data)
-
- # Validate the function
- @validator("code")
- def validate_func(cls, v):
- try:
- validate.eval_function(v)
- except Exception as e:
- raise e
-
- return v
-
- def get_function(self):
- """Get the function"""
- function_name = validate.extract_function_name(self.code)
-
- return validate.create_function(self.code, function_name)
-
-
-class PythonFunctionTool(Function, Tool):
- name: str = "Custom Tool"
- description: str
- code: str
-
- def ___init__(self, name: str, description: str, code: str):
- self.name = name
- self.description = description
- self.code = code
- self.func = get_function(self.code)
- super().__init__(name=name, description=description, func=self.func)
-
-
-class PythonFunction(Function):
- code: str
diff --git a/src/backend/base/langflow/interface/tools/util.py b/src/backend/base/langflow/interface/tools/util.py
deleted file mode 100644
index 7c8020aa9..000000000
--- a/src/backend/base/langflow/interface/tools/util.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import ast
-import inspect
-import textwrap
-from typing import Dict, Union
-
-from langchain.agents.tools import Tool
-
-
-def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
- tree = ast.parse(textwrap.dedent(inspect.getsource(func)))
-
- # Iterate over the statements in the abstract syntax tree
- for node in ast.walk(tree):
- # Find the first return statement
- if isinstance(node, ast.Return):
- tool = node.value
- if isinstance(tool, ast.Call):
- if isinstance(tool.func, ast.Name) and tool.func.id == "Tool":
- if tool.keywords:
- tool_params = {}
- for keyword in tool.keywords:
- if keyword.arg == "name":
- try:
- tool_params["name"] = ast.literal_eval(keyword.value)
- except ValueError:
- break
- elif keyword.arg == "description":
- try:
- tool_params["description"] = ast.literal_eval(keyword.value)
- except ValueError:
- continue
-
- return tool_params
- return {
- "name": ast.literal_eval(tool.args[0]),
- "description": ast.literal_eval(tool.args[2]),
- }
- #
- else:
- # get the class object from the return statement
- try:
- class_obj = eval(compile(ast.Expression(tool), "", "eval"))
- except Exception:
- return None
-
- return {
- "name": getattr(class_obj, "name"),
- "description": getattr(class_obj, "description"),
- }
- # Return None if no return statement was found
- return None
-
-
-def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
- tree = ast.parse(textwrap.dedent(inspect.getsource(cls)))
-
- tool_params = {}
-
- # Iterate over the statements in the abstract syntax tree
- for node in ast.walk(tree):
- if isinstance(node, ast.ClassDef):
- # Find the class definition and look for methods
- for stmt in node.body:
- if isinstance(stmt, ast.FunctionDef) and stmt.name == "__init__":
- # There is no assignment statements in the __init__ method
- # So we need to get the params from the function definition
- for arg in stmt.args.args:
- if arg.arg == "name":
- # It should be the name of the class
- tool_params[arg.arg] = cls.__name__
- elif arg.arg == "self":
- continue
- # If there is not default value, set it to an empty string
- else:
- try:
- annotation = ast.literal_eval(arg.annotation) # type: ignore
- tool_params[arg.arg] = annotation
- except ValueError:
- tool_params[arg.arg] = ""
- # Get the attribute name and the annotation
- elif cls != Tool and isinstance(stmt, ast.AnnAssign):
- # Get the attribute name and the annotation
- tool_params[stmt.target.id] = "" # type: ignore
-
- return tool_params
-
-
-def get_tool_params(tool, **kwargs) -> Dict:
- # Parse the function code into an abstract syntax tree
- # Define if it is a function or a class
- if inspect.isfunction(tool):
- return get_func_tool_params(tool, **kwargs) or {}
- elif inspect.isclass(tool):
- # Get the parameters necessary to
- # instantiate the class
-
- return get_class_tool_params(tool, **kwargs) or {}
-
- else:
- raise ValueError("Tool must be a function or class.")
diff --git a/src/backend/base/langflow/interface/types.py b/src/backend/base/langflow/interface/types.py
index 46fa44a37..a092a7d19 100644
--- a/src/backend/base/langflow/interface/types.py
+++ b/src/backend/base/langflow/interface/types.py
@@ -1,69 +1,10 @@
-from cachetools import LRUCache, cached
-
-from langflow.interface.agents.base import agent_creator
-from langflow.interface.chains.base import chain_creator
-from langflow.interface.custom.directory_reader.utils import merge_nested_dicts_with_renaming
-from langflow.interface.custom.utils import build_custom_components
-from langflow.interface.document_loaders.base import documentloader_creator
-from langflow.interface.embeddings.base import embedding_creator
-from langflow.interface.llms.base import llm_creator
-from langflow.interface.memories.base import memory_creator
-from langflow.interface.retrievers.base import retriever_creator
-from langflow.interface.text_splitters.base import textsplitter_creator
-from langflow.interface.toolkits.base import toolkits_creator
-from langflow.interface.tools.base import tool_creator
-from langflow.interface.wrappers.base import wrapper_creator
-
-
-# Used to get the base_classes list
-def get_type_list():
- """Get a list of all langchain types"""
- all_types = build_langchain_types_dict()
-
- # all_types.pop("tools")
-
- for key, value in all_types.items():
- all_types[key] = [item["template"]["_type"] for item in value.values()]
-
- return all_types
-
-
-@cached(LRUCache(maxsize=1))
-def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
- """Build a dictionary of all langchain types"""
- all_types = {}
-
- creators = [
- chain_creator,
- agent_creator,
- # prompt_creator,
- llm_creator,
- memory_creator,
- tool_creator,
- toolkits_creator,
- wrapper_creator,
- embedding_creator,
- # vectorstore_creator,
- documentloader_creator,
- textsplitter_creator,
- # utility_creator,
- retriever_creator,
- ]
-
- all_types = {}
- for creator in creators:
- created_types = creator.to_dict()
- if created_types[creator.type_name].values():
- all_types.update(created_types)
-
- return all_types
+from langflow.custom.utils import build_custom_components
def get_all_types_dict(components_paths):
"""Get all types dictionary combining native and custom components."""
- native_components = build_langchain_types_dict()
custom_components_from_file = build_custom_components(components_paths=components_paths)
- return merge_nested_dicts_with_renaming(native_components, custom_components_from_file)
+ return custom_components_from_file
def get_all_components(components_paths, as_dict=False):
diff --git a/src/backend/base/langflow/interface/utilities/__init__.py b/src/backend/base/langflow/interface/utilities/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/backend/base/langflow/interface/utilities/base.py b/src/backend/base/langflow/interface/utilities/base.py
deleted file mode 100644
index 474bf8ca2..000000000
--- a/src/backend/base/langflow/interface/utilities/base.py
+++ /dev/null
@@ -1,65 +0,0 @@
-from typing import Dict, List, Optional, Type
-
-from langchain_community import utilities
-from loguru import logger
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.importing.utils import import_class
-from langflow.interface.utils import build_template_from_class
-from langflow.legacy_custom.customs import get_custom_nodes
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.utilities import UtilitiesFrontendNode
-
-
-class UtilityCreator(LangChainTypeCreator):
- type_name: str = "utilities"
-
- @property
- def frontend_node_class(self) -> Type[UtilitiesFrontendNode]:
- return UtilitiesFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- """
- Returns a dictionary mapping utility names to their corresponding loader classes.
- If the dictionary has not been created yet, it is created by importing all utility classes
- from the langchain.chains module and filtering them according to the settings.utilities list.
- """
- if self.type_dict is None:
- settings_service = get_settings_service()
- self.type_dict = {}
- for utility_name in utilities.__all__:
- try:
- imported = import_class(f"langchain_community.utilities.{utility_name}")
- self.type_dict[utility_name] = imported
- except Exception:
- pass
-
- self.type_dict["SQLDatabase"] = utilities.SQLDatabase
- # Filter according to settings.utilities
- self.type_dict = {
- name: utility
- for name, utility in self.type_dict.items()
- if name in settings_service.settings.UTILITIES or settings_service.settings.DEV
- }
-
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of a utility."""
- try:
- custom_nodes = get_custom_nodes(self.type_name)
- if name in custom_nodes.keys():
- return custom_nodes[name]
- return build_template_from_class(name, self.type_to_loader_dict)
- except ValueError as exc:
- raise ValueError(f"Utility {name} not found") from exc
-
- except AttributeError as exc:
- logger.error(f"Utility {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- return list(self.type_to_loader_dict.keys())
-
-
-utility_creator = UtilityCreator()
diff --git a/src/backend/base/langflow/interface/utils.py b/src/backend/base/langflow/interface/utils.py
index d4271eabf..986352f15 100644
--- a/src/backend/base/langflow/interface/utils.py
+++ b/src/backend/base/langflow/interface/utils.py
@@ -7,12 +7,13 @@ from typing import Dict
import yaml
from docstring_parser import parse
-from langchain.base_language import BaseLanguageModel
+from langchain_core.language_models import BaseLanguageModel
+from loguru import logger
+from PIL.Image import Image
+
from langflow.services.chat.config import ChatConfig
from langflow.services.deps import get_settings_service
from langflow.utils.util import format_dict, get_base_classes, get_default_factory
-from loguru import logger
-from PIL.Image import Image
def load_file_into_dict(file_path: str) -> dict:
@@ -95,13 +96,14 @@ def setup_llm_caching():
try:
set_langchain_cache(settings_service.settings)
except ImportError:
- logger.warning(f"Could not import {settings_service.settings.CACHE_TYPE}. ")
+ logger.warning(f"Could not import {settings_service.settings.cache_type}. ")
except Exception as exc:
logger.warning(f"Could not setup LLM caching. Error: {exc}")
def set_langchain_cache(settings):
from langchain.globals import set_llm_cache
+
from langflow.interface.importing.utils import import_class
if cache_type := os.getenv("LANGFLOW_LANGCHAIN_CACHE"):
diff --git a/src/backend/base/langflow/interface/vector_store/__init__.py b/src/backend/base/langflow/interface/vector_store/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/backend/base/langflow/interface/vector_store/base.py b/src/backend/base/langflow/interface/vector_store/base.py
deleted file mode 100644
index 893c78fca..000000000
--- a/src/backend/base/langflow/interface/vector_store/base.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from typing import Any, Dict, List, Optional, Type
-
-from langchain import vectorstores
-from loguru import logger
-
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.importing.utils import import_class
-from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode
-from langflow.utils.util import build_template_from_method
-
-
-class VectorstoreCreator(LangChainTypeCreator):
- type_name: str = "vectorstores"
-
- @property
- def frontend_node_class(self) -> Type[VectorStoreFrontendNode]:
- return VectorStoreFrontendNode
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- self.type_dict: dict[str, Any] = {
- vectorstore_name: import_class(f"langchain_community.vectorstores.{vectorstore_name}")
- for vectorstore_name in vectorstores.__all__
- }
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- """Get the signature of an embedding."""
- try:
- return build_template_from_method(
- name,
- type_to_cls_dict=self.type_to_loader_dict,
- method_name="from_texts",
- )
- except ValueError as exc:
- raise ValueError(f"Vector Store {name} not found") from exc
- except AttributeError as exc:
- logger.error(f"Vector Store {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- settings_service = get_settings_service()
- return [
- vectorstore
- for vectorstore in self.type_to_loader_dict.keys()
- if vectorstore in settings_service.settings.VECTORSTORES or settings_service.settings.DEV
- ]
-
-
-vectorstore_creator = VectorstoreCreator()
diff --git a/src/backend/base/langflow/interface/wrappers/__init__.py b/src/backend/base/langflow/interface/wrappers/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/backend/base/langflow/interface/wrappers/base.py b/src/backend/base/langflow/interface/wrappers/base.py
deleted file mode 100644
index b850d345f..000000000
--- a/src/backend/base/langflow/interface/wrappers/base.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from typing import Dict, List, Optional
-
-from langchain_community.utilities import requests
-from langflow.interface.base import LangChainTypeCreator
-from langflow.interface.utils import build_template_from_class
-from loguru import logger
-
-
-class WrapperCreator(LangChainTypeCreator):
- type_name: str = "wrappers"
-
- @property
- def type_to_loader_dict(self) -> Dict:
- if self.type_dict is None:
- self.type_dict = {wrapper.__name__: wrapper for wrapper in [requests.TextRequestsWrapper]}
- return self.type_dict
-
- def get_signature(self, name: str) -> Optional[Dict]:
- try:
- return build_template_from_class(name, self.type_to_loader_dict)
- except ValueError as exc:
- raise ValueError("Wrapper not found") from exc
- except AttributeError as exc:
- logger.error(f"Wrapper {name} not loaded: {exc}")
- return None
-
- def to_list(self) -> List[str]:
- return list(self.type_to_loader_dict.keys())
-
-
-wrapper_creator = WrapperCreator()
diff --git a/src/backend/base/langflow/legacy_custom/customs.py b/src/backend/base/langflow/legacy_custom/customs.py
index ff69064ff..26e5e33fa 100644
--- a/src/backend/base/langflow/legacy_custom/customs.py
+++ b/src/backend/base/langflow/legacy_custom/customs.py
@@ -2,33 +2,6 @@ from langflow.template import frontend_node
# These should always be instantiated
CUSTOM_NODES: dict[str, dict[str, frontend_node.base.FrontendNode]] = {
- # "prompts": {
- # "ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode(),
- # },
- "tools": {
- "PythonFunctionTool": frontend_node.tools.PythonFunctionToolNode(),
- "Tool": frontend_node.tools.ToolNode(),
- },
- "agents": {
- "JsonAgent": frontend_node.agents.JsonAgentNode(),
- "CSVAgent": frontend_node.agents.CSVAgentNode(),
- "VectorStoreAgent": frontend_node.agents.VectorStoreAgentNode(),
- "VectorStoreRouterAgent": frontend_node.agents.VectorStoreRouterAgentNode(),
- "SQLAgent": frontend_node.agents.SQLAgentNode(),
- },
- "utilities": {
- "SQLDatabase": frontend_node.agents.SQLDatabaseNode(),
- },
- "memories": {
- "PostgresChatMessageHistory": frontend_node.memories.PostgresChatMessageHistoryFrontendNode(),
- "MongoDBChatMessageHistory": frontend_node.memories.MongoDBChatMessageHistoryFrontendNode(),
- },
- "chains": {
- "SeriesCharacterChain": frontend_node.chains.SeriesCharacterChainNode(),
- "TimeTravelGuideChain": frontend_node.chains.TimeTravelGuideChainNode(),
- "MidJourneyPromptChain": frontend_node.chains.MidJourneyPromptChainNode(),
- "load_qa_chain": frontend_node.chains.CombineDocsChainNode(),
- },
"custom_components": {
"CustomComponent": frontend_node.custom_components.CustomComponentFrontendNode(),
},
diff --git a/src/backend/base/langflow/load.py b/src/backend/base/langflow/load.py
deleted file mode 100644
index 1262ac4b9..000000000
--- a/src/backend/base/langflow/load.py
+++ /dev/null
@@ -1 +0,0 @@
-from langflow.processing.load import load_flow_from_json, run_flow_from_json # noqa: F401
diff --git a/src/backend/base/langflow/load/__init__.py b/src/backend/base/langflow/load/__init__.py
new file mode 100644
index 000000000..2002e8bb1
--- /dev/null
+++ b/src/backend/base/langflow/load/__init__.py
@@ -0,0 +1,3 @@
+from .load import load_flow_from_json, run_flow_from_json # noqa: F401
+
+__all__ = ["load_flow_from_json", "run_flow_from_json"]
diff --git a/src/backend/base/langflow/processing/load.py b/src/backend/base/langflow/load/load.py
similarity index 100%
rename from src/backend/base/langflow/processing/load.py
rename to src/backend/base/langflow/load/load.py
diff --git a/src/backend/base/langflow/processing/base.py b/src/backend/base/langflow/processing/base.py
index e11af0a44..26da99842 100644
--- a/src/backend/base/langflow/processing/base.py
+++ b/src/backend/base/langflow/processing/base.py
@@ -1,10 +1,8 @@
from typing import TYPE_CHECKING, List, Union
-from langchain.agents.agent import AgentExecutor
-from langchain.callbacks.base import BaseCallbackHandler
+from langchain_core.callbacks import BaseCallbackHandler
from loguru import logger
-from langflow.processing.process import fix_memory_inputs, format_actions
from langflow.services.deps import get_plugins_service
if TYPE_CHECKING:
@@ -44,48 +42,3 @@ def flush_langfuse_callback_if_present(callbacks: List[Union[BaseCallbackHandler
if hasattr(callback, "langfuse") and hasattr(callback.langfuse, "flush"):
callback.langfuse.flush()
break
-
-
-async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwargs):
- """Get result and thought from extracted json"""
-
- try:
- if hasattr(langchain_object, "verbose"):
- langchain_object.verbose = True
-
- if hasattr(langchain_object, "return_intermediate_steps"):
- # https://github.com/hwchase17/langchain/issues/2068
- # Deactivating until we have a frontend solution
- # to display intermediate steps
- langchain_object.return_intermediate_steps = True
- try:
- if not isinstance(langchain_object, AgentExecutor):
- fix_memory_inputs(langchain_object)
- except Exception as exc:
- logger.error(f"Error fixing memory inputs: {exc}")
-
- trace_id = kwargs.pop("session_id", None)
- try:
- callbacks = setup_callbacks(sync=False, trace_id=trace_id, **kwargs)
- output = await langchain_object.acall(inputs, callbacks=callbacks)
- except Exception as exc:
- # make the error message more informative
- logger.debug(f"Error: {str(exc)}")
- callbacks = setup_callbacks(sync=True, trace_id=trace_id, **kwargs)
- output = langchain_object(inputs, callbacks=callbacks)
-
- # if langfuse callback is present, run callback.langfuse.flush()
- flush_langfuse_callback_if_present(callbacks)
-
- intermediate_steps = output.get("intermediate_steps", []) if isinstance(output, dict) else []
-
- result = output.get(langchain_object.output_keys[0]) if isinstance(output, dict) else output
- try:
- thought = format_actions(intermediate_steps) if intermediate_steps else ""
- except Exception as exc:
- logger.exception(exc)
- thought = ""
- except Exception as exc:
- logger.exception(exc)
- raise ValueError(f"Error: {str(exc)}") from exc
- return result, thought, output
diff --git a/src/backend/base/langflow/processing/process.py b/src/backend/base/langflow/processing/process.py
index d46274b4c..d53b5e25f 100644
--- a/src/backend/base/langflow/processing/process.py
+++ b/src/backend/base/langflow/processing/process.py
@@ -1,127 +1,19 @@
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
-
-from langchain.agents import AgentExecutor
-from langchain.schema import AgentAction
from loguru import logger
from pydantic import BaseModel
from langflow.graph.graph.base import Graph
from langflow.graph.schema import RunOutputs
from langflow.graph.vertex.base import Vertex
-from langflow.interface.run import get_memory_key, update_memory_keys
from langflow.schema.graph import InputValue, Tweaks
from langflow.schema.schema import INPUT_FIELD_NAME
from langflow.services.session.service import SessionService
-
if TYPE_CHECKING:
from langflow.api.v1.schemas import InputValueRequest
-def fix_memory_inputs(langchain_object):
- """
- Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the
- object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the
- get_memory_key function and updates the memory keys using the update_memory_keys function.
- """
- if not hasattr(langchain_object, "memory") or langchain_object.memory is None:
- return
- try:
- if (
- hasattr(langchain_object.memory, "memory_key")
- and langchain_object.memory.memory_key in langchain_object.input_variables
- ):
- return
- except AttributeError:
- input_variables = (
- langchain_object.prompt.input_variables
- if hasattr(langchain_object, "prompt")
- else langchain_object.input_keys
- )
- if langchain_object.memory.memory_key in input_variables:
- return
-
- possible_new_mem_key = get_memory_key(langchain_object)
- if possible_new_mem_key is not None:
- update_memory_keys(langchain_object, possible_new_mem_key)
-
-
-def format_actions(actions: List[Tuple[AgentAction, str]]) -> str:
- """Format a list of (AgentAction, answer) tuples into a string."""
- output = []
- for action, answer in actions:
- log = action.log
- tool = action.tool
- tool_input = action.tool_input
- output.append(f"Log: {log}")
- if "Action" not in log and "Action Input" not in log:
- output.append(f"Tool: {tool}")
- output.append(f"Tool Input: {tool_input}")
- output.append(f"Answer: {answer}")
- output.append("") # Add a blank line
- return "\n".join(output)
-
-
-def get_result_and_thought(langchain_object: Any, inputs: dict):
- """Get result and thought from extracted json"""
- try:
- if hasattr(langchain_object, "verbose"):
- langchain_object.verbose = True
-
- if hasattr(langchain_object, "return_intermediate_steps"):
- langchain_object.return_intermediate_steps = False
-
- try:
- if not isinstance(langchain_object, AgentExecutor):
- fix_memory_inputs(langchain_object)
- except Exception as exc:
- logger.error(f"Error fixing memory inputs: {exc}")
-
- try:
- output = langchain_object(inputs, return_only_outputs=True)
- except ValueError as exc:
- # make the error message more informative
- logger.debug(f"Error: {str(exc)}")
- output = langchain_object.run(inputs)
-
- except Exception as exc:
- raise ValueError(f"Error: {str(exc)}") from exc
- return output
-
-
-def get_input_str_if_only_one_input(inputs: dict) -> Optional[str]:
- """Get input string if only one input is provided"""
- return list(inputs.values())[0] if len(inputs) == 1 else None
-
-
-def process_inputs(
- inputs: Optional[Union[dict, List[dict]]] = None,
- artifacts: Optional[Dict[str, Any]] = None,
-) -> Union[dict, List[dict]]:
- if inputs is None:
- inputs = {}
- if artifacts is None:
- artifacts = {}
-
- if isinstance(inputs, dict):
- inputs = update_inputs_dict(inputs, artifacts)
- elif isinstance(inputs, List):
- inputs = [update_inputs_dict(inp, artifacts) for inp in inputs]
-
- return inputs
-
-
-def update_inputs_dict(inputs: dict, artifacts: Dict[str, Any]) -> dict:
- for key, value in artifacts.items():
- if key == "repr":
- continue
- elif key not in inputs or not inputs[key]:
- inputs[key] = value
-
- return inputs
-
-
class Result(BaseModel):
result: Any
session_id: str
diff --git a/src/backend/base/langflow/services/cache/factory.py b/src/backend/base/langflow/services/cache/factory.py
index a64b47f3a..b04eb6417 100644
--- a/src/backend/base/langflow/services/cache/factory.py
+++ b/src/backend/base/langflow/services/cache/factory.py
@@ -16,14 +16,14 @@ class CacheServiceFactory(ServiceFactory):
# Here you would have logic to create and configure a CacheService
# based on the settings_service
- if settings_service.settings.CACHE_TYPE == "redis":
+ if settings_service.settings.cache_type == "redis":
logger.debug("Creating Redis cache")
redis_cache = RedisCache(
- host=settings_service.settings.REDIS_HOST,
- port=settings_service.settings.REDIS_PORT,
- db=settings_service.settings.REDIS_DB,
- url=settings_service.settings.REDIS_URL,
- expiration_time=settings_service.settings.REDIS_CACHE_EXPIRE,
+ host=settings_service.settings.redis_host,
+ port=settings_service.settings.redis_port,
+ db=settings_service.settings.redis_db,
+ url=settings_service.settings.redis_url,
+ expiration_time=settings_service.settings.redis_cache_expire,
)
if redis_cache.is_connected():
logger.debug("Redis cache is connected")
@@ -31,7 +31,7 @@ class CacheServiceFactory(ServiceFactory):
logger.warning("Redis cache is not connected, falling back to in-memory cache")
return ThreadingInMemoryCache()
- elif settings_service.settings.CACHE_TYPE == "memory":
+ elif settings_service.settings.cache_type == "memory":
return ThreadingInMemoryCache()
- elif settings_service.settings.CACHE_TYPE == "async":
+ elif settings_service.settings.cache_type == "async":
return AsyncInMemoryCache()
diff --git a/src/backend/base/langflow/services/chat/utils.py b/src/backend/base/langflow/services/chat/utils.py
deleted file mode 100644
index 271c0e85b..000000000
--- a/src/backend/base/langflow/services/chat/utils.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from typing import Any
-
-from langchain.agents import AgentExecutor
-from langchain.chains.base import Chain
-from langchain_core.runnables import Runnable
-from loguru import logger
-
-from langflow.api.v1.schemas import ChatMessage
-from langflow.interface.utils import try_setting_streaming_options
-from langflow.processing.base import get_result_and_steps
-
-LANGCHAIN_RUNNABLES = (Chain, Runnable, AgentExecutor)
-
-
-async def process_graph(
- build_result,
- chat_inputs: ChatMessage,
- client_id: str,
- session_id: str,
-):
- build_result = try_setting_streaming_options(build_result)
- logger.debug("Loaded langchain object")
-
- if build_result is None:
- # Raise user facing error
- raise ValueError("There was an error loading the langchain_object. Please, check all the nodes and try again.")
-
- # Generate result and thought
- try:
- if chat_inputs.message is None:
- logger.debug("No message provided")
- chat_inputs.message = {}
-
- logger.debug("Generating result and thought")
- if isinstance(build_result, LANGCHAIN_RUNNABLES):
- result, intermediate_steps, raw_output = await get_result_and_steps(
- build_result,
- chat_inputs.message,
- client_id=client_id,
- session_id=session_id,
- )
- else:
- raise TypeError(f"Unknown type {type(build_result)}")
- logger.debug("Generated result and intermediate_steps")
- return result, intermediate_steps, raw_output
- except Exception as e:
- # Log stack trace
- logger.exception(e)
- raise e
-
-
-async def run_build_result(build_result: Any, chat_inputs: ChatMessage, client_id: str, session_id: str):
- return build_result(inputs=chat_inputs.message)
diff --git a/src/backend/base/langflow/services/database/factory.py b/src/backend/base/langflow/services/database/factory.py
index 3b03da131..7f7a142b5 100644
--- a/src/backend/base/langflow/services/database/factory.py
+++ b/src/backend/base/langflow/services/database/factory.py
@@ -1,6 +1,5 @@
from typing import TYPE_CHECKING
-
from langflow.services.database.service import DatabaseService
from langflow.services.factory import ServiceFactory
@@ -14,6 +13,6 @@ class DatabaseServiceFactory(ServiceFactory):
def create(self, settings_service: "SettingsService"):
# Here you would have logic to create and configure a DatabaseService
- if not settings_service.settings.DATABASE_URL:
+ if not settings_service.settings.database_url:
raise ValueError("No database URL provided")
- return DatabaseService(settings_service.settings.DATABASE_URL)
+ return DatabaseService(settings_service.settings.database_url)
diff --git a/src/backend/base/langflow/services/database/models/api_key/model.py b/src/backend/base/langflow/services/database/models/api_key/model.py
index be4e3ed4d..cb216d9ae 100644
--- a/src/backend/base/langflow/services/database/models/api_key/model.py
+++ b/src/backend/base/langflow/services/database/models/api_key/model.py
@@ -2,7 +2,7 @@ from datetime import datetime, timezone
from typing import TYPE_CHECKING, Optional
from uuid import UUID, uuid4
-from pydantic import field_validator, validator
+from pydantic import field_validator
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel, func
if TYPE_CHECKING:
@@ -40,6 +40,7 @@ class ApiKeyCreate(ApiKeyBase):
created_at: Optional[datetime] = Field(default_factory=utc_now)
@field_validator("created_at", mode="before")
+ @classmethod
def set_created_at(cls, v):
return v or utc_now()
@@ -52,10 +53,11 @@ class UnmaskedApiKeyRead(ApiKeyBase):
class ApiKeyRead(ApiKeyBase):
id: UUID
- api_key: str = Field()
+ api_key: str = Field(schema_extra={"validate_default": True})
user_id: UUID = Field()
- @validator("api_key", always=True)
+ @field_validator("api_key")
+ @classmethod
def mask_api_key(cls, v):
# This validator will always run, and will mask the API key
return f"{v[:8]}{'*' * (len(v) - 8)}"
diff --git a/src/backend/base/langflow/services/database/service.py b/src/backend/base/langflow/services/database/service.py
index 14c79f85b..674c6c645 100644
--- a/src/backend/base/langflow/services/database/service.py
+++ b/src/backend/base/langflow/services/database/service.py
@@ -37,7 +37,7 @@ class DatabaseService(Service):
def _create_engine(self) -> "Engine":
"""Create the engine for the database."""
settings_service = get_settings_service()
- if settings_service.settings.DATABASE_URL and settings_service.settings.DATABASE_URL.startswith("sqlite"):
+ if settings_service.settings.database_url and settings_service.settings.database_url.startswith("sqlite"):
connect_args = {"check_same_thread": False}
else:
connect_args = {}
diff --git a/src/backend/base/langflow/services/plugins/langfuse_plugin.py b/src/backend/base/langflow/services/plugins/langfuse_plugin.py
index e6d37d3c5..ffc8139f3 100644
--- a/src/backend/base/langflow/services/plugins/langfuse_plugin.py
+++ b/src/backend/base/langflow/services/plugins/langfuse_plugin.py
@@ -24,12 +24,12 @@ class LangfuseInstance:
settings_manager = get_settings_service()
- if settings_manager.settings.LANGFUSE_PUBLIC_KEY and settings_manager.settings.LANGFUSE_SECRET_KEY:
+ if settings_manager.settings.langfuse_public_key and settings_manager.settings.langfuse_secret_key:
logger.debug("Langfuse credentials found")
cls._instance = Langfuse(
- public_key=settings_manager.settings.LANGFUSE_PUBLIC_KEY,
- secret_key=settings_manager.settings.LANGFUSE_SECRET_KEY,
- host=settings_manager.settings.LANGFUSE_HOST,
+ public_key=settings_manager.settings.langfuse_public_key,
+ secret_key=settings_manager.settings.langfuse_secret_key,
+ host=settings_manager.settings.langfuse_host,
)
else:
logger.debug("No Langfuse credentials found")
diff --git a/src/backend/base/langflow/services/settings/auth.py b/src/backend/base/langflow/services/settings/auth.py
index 103c96a40..0ea3c237e 100644
--- a/src/backend/base/langflow/services/settings/auth.py
+++ b/src/backend/base/langflow/services/settings/auth.py
@@ -4,7 +4,7 @@ from typing import Literal
from loguru import logger
from passlib.context import CryptContext
-from pydantic import Field, SecretStr, validator
+from pydantic import Field, SecretStr, field_validator
from pydantic_settings import BaseSettings
from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD
@@ -62,23 +62,25 @@ class AuthSettings(BaseSettings):
# the default values
# so we need to validate the superuser and superuser_password
# fields
- @validator("SUPERUSER", "SUPERUSER_PASSWORD", pre=True)
- def validate_superuser(cls, value, values):
- if values.get("AUTO_LOGIN"):
+ @field_validator("SUPERUSER", "SUPERUSER_PASSWORD", mode="before")
+ @classmethod
+ def validate_superuser(cls, value, info):
+ if info.data.get("AUTO_LOGIN"):
if value != DEFAULT_SUPERUSER:
value = DEFAULT_SUPERUSER
logger.debug("Resetting superuser to default value")
- if values.get("SUPERUSER_PASSWORD") != DEFAULT_SUPERUSER_PASSWORD:
- values["SUPERUSER_PASSWORD"] = DEFAULT_SUPERUSER_PASSWORD
+ if info.data.get("SUPERUSER_PASSWORD") != DEFAULT_SUPERUSER_PASSWORD:
+ info.data["SUPERUSER_PASSWORD"] = DEFAULT_SUPERUSER_PASSWORD
logger.debug("Resetting superuser password to default value")
return value
return value
- @validator("SECRET_KEY", pre=True)
- def get_secret_key(cls, value, values):
- config_dir = values.get("CONFIG_DIR")
+ @field_validator("SECRET_KEY", mode="before")
+ @classmethod
+ def get_secret_key(cls, value, info):
+ config_dir = info.data.get("CONFIG_DIR")
if not config_dir:
logger.debug("No CONFIG_DIR provided, not saving secret key")
diff --git a/src/backend/base/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py
index 97abf2d2b..05a368c9b 100644
--- a/src/backend/base/langflow/services/settings/base.py
+++ b/src/backend/base/langflow/services/settings/base.py
@@ -7,12 +7,13 @@ from typing import Any, List, Optional, Tuple, Type
import orjson
import yaml
-from langflow.services.settings.constants import VARIABLES_TO_GET_FROM_ENVIRONMENT
from loguru import logger
from pydantic import field_validator
from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings, EnvSettingsSource, PydanticBaseSettingsSource, SettingsConfigDict
+from langflow.services.settings.constants import VARIABLES_TO_GET_FROM_ENVIRONMENT
+
# BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
BASE_COMPONENTS_PATH = str(Path(__file__).parent.parent.parent / "components")
@@ -57,59 +58,43 @@ class MyCustomSource(EnvSettingsSource):
class Settings(BaseSettings):
- CHAINS: dict = {}
- AGENTS: dict = {}
- PROMPTS: dict = {}
- LLMS: dict = {}
- TOOLS: dict = {}
- MEMORIES: dict = {}
- EMBEDDINGS: dict = {}
- VECTORSTORES: dict = {}
- DOCUMENTLOADERS: dict = {}
- WRAPPERS: dict = {}
- RETRIEVERS: dict = {}
- TOOLKITS: dict = {}
- TEXTSPLITTERS: dict = {}
- UTILITIES: dict = {}
- CUSTOM_COMPONENTS: dict = {}
-
# Define the default LANGFLOW_DIR
- CONFIG_DIR: Optional[str] = None
+ config_dir: Optional[str] = None
# Define if langflow db should be saved in config dir or
# in the langflow directory
- SAVE_DB_IN_CONFIG_DIR: bool = False
+ save_db_in_config_dir: bool = False
"""Define if langflow database should be saved in LANGFLOW_CONFIG_DIR or in the langflow directory (i.e. in the package directory)."""
- DEV: bool = False
- DATABASE_URL: Optional[str] = None
- CACHE_TYPE: str = "async"
- REMOVE_API_KEYS: bool = False
- COMPONENTS_PATH: List[str] = []
- LANGCHAIN_CACHE: str = "InMemoryCache"
+ dev: bool = False
+ database_url: Optional[str] = None
+ cache_type: str = "async"
+ remove_api_keys: bool = False
+ components_path: List[str] = []
+ langchain_cache: str = "InMemoryCache"
# Redis
- REDIS_HOST: str = "localhost"
- REDIS_PORT: int = 6379
- REDIS_DB: int = 0
- REDIS_URL: Optional[str] = None
- REDIS_CACHE_EXPIRE: int = 3600
+ redis_host: str = "localhost"
+ redis_port: int = 6379
+ redis_db: int = 0
+ redis_url: Optional[str] = None
+ redis_cache_expire: int = 3600
# PLUGIN_DIR: Optional[str] = None
- LANGFUSE_SECRET_KEY: Optional[str] = None
- LANGFUSE_PUBLIC_KEY: Optional[str] = None
- LANGFUSE_HOST: Optional[str] = None
+ langfuse_secret_key: Optional[str] = None
+ langfuse_public_key: Optional[str] = None
+ langfuse_host: Optional[str] = None
- STORE: Optional[bool] = True
- STORE_URL: Optional[str] = "https://api.langflow.store"
- DOWNLOAD_WEBHOOK_URL: Optional[str] = (
+ store: Optional[bool] = True
+ store_url: Optional[str] = "https://api.langflow.store"
+ download_webhook_url: Optional[str] = (
"https://api.langflow.store/flows/trigger/ec611a61-8460-4438-b187-a4f65e5559d4"
)
- LIKE_WEBHOOK_URL: Optional[str] = "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da"
+ like_webhook_url: Optional[str] = "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da"
- STORAGE_TYPE: str = "local"
+ storage_type: str = "local"
- CELERY_ENABLED: bool = False
+ celery_enabled: bool = False
fallback_to_env_var: bool = True
"""If set to True, Global Variables set in the UI will fallback to a environment variable
@@ -120,7 +105,7 @@ class Settings(BaseSettings):
variables_to_get_from_environment: list[str] = VARIABLES_TO_GET_FROM_ENVIRONMENT
"""List of environment variables to get from the environment and store in the database."""
- @field_validator("CONFIG_DIR", mode="before")
+ @field_validator("config_dir", mode="before")
def set_langflow_dir(cls, value):
if not value:
from platformdirs import user_cache_dir
@@ -143,7 +128,7 @@ class Settings(BaseSettings):
return str(value)
- @field_validator("DATABASE_URL", mode="before")
+ @field_validator("database_url", mode="before")
def set_database_url(cls, value, info):
if not value:
logger.debug("No database_url provided, trying LANGFLOW_DATABASE_URL env variable")
@@ -151,17 +136,17 @@ class Settings(BaseSettings):
value = langflow_database_url
logger.debug("Using LANGFLOW_DATABASE_URL env variable.")
else:
- logger.debug("No DATABASE_URL env variable, using sqlite database")
+ logger.debug("No database_url env variable, using sqlite database")
# Originally, we used sqlite:///./langflow.db
# so we need to migrate to the new format
# if there is a database in that location
- if not info.data["CONFIG_DIR"]:
- raise ValueError("CONFIG_DIR not set, please set it or provide a DATABASE_URL")
+ if not info.data["config_dir"]:
+ raise ValueError("config_dir not set, please set it or provide a database_url")
from langflow.version import is_pre_release # type: ignore
- if info.data["SAVE_DB_IN_CONFIG_DIR"]:
- database_dir = info.data["CONFIG_DIR"]
- logger.debug(f"Saving database to CONFIG_DIR: {database_dir}")
+ if info.data["save_db_in_config_dir"]:
+ database_dir = info.data["config_dir"]
+ logger.debug(f"Saving database to config_dir: {database_dir}")
else:
database_dir = Path(__file__).parent.parent.parent.resolve()
logger.debug(f"Saving database to langflow directory: {database_dir}")
@@ -174,12 +159,12 @@ class Settings(BaseSettings):
if is_pre_release:
if Path(new_pre_path).exists():
final_path = new_pre_path
- elif Path(new_path).exists() and info.data["SAVE_DB_IN_CONFIG_DIR"]:
+ elif Path(new_path).exists() and info.data["save_db_in_config_dir"]:
# We need to copy the current db to the new location
logger.debug("Copying existing database to new location")
copy2(new_path, new_pre_path)
logger.debug(f"Copied existing database to {new_pre_path}")
- elif Path(f"./{db_file_name}").exists() and info.data["SAVE_DB_IN_CONFIG_DIR"]:
+ elif Path(f"./{db_file_name}").exists() and info.data["save_db_in_config_dir"]:
logger.debug("Copying existing database to new location")
copy2(f"./{db_file_name}", new_pre_path)
logger.debug(f"Copied existing database to {new_pre_path}")
@@ -211,7 +196,7 @@ class Settings(BaseSettings):
return value
- @field_validator("COMPONENTS_PATH", mode="before")
+ @field_validator("components_path", mode="before")
def set_components_path(cls, value):
if os.getenv("LANGFLOW_COMPONENTS_PATH"):
logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")
@@ -240,23 +225,8 @@ class Settings(BaseSettings):
def update_from_yaml(self, file_path: str, dev: bool = False):
new_settings = load_settings_from_yaml(file_path)
- self.CHAINS = new_settings.CHAINS or {}
- self.AGENTS = new_settings.AGENTS or {}
- self.PROMPTS = new_settings.PROMPTS or {}
- self.LLMS = new_settings.LLMS or {}
- self.TOOLS = new_settings.TOOLS or {}
- self.MEMORIES = new_settings.MEMORIES or {}
- self.WRAPPERS = new_settings.WRAPPERS or {}
- self.TOOLKITS = new_settings.TOOLKITS or {}
- self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {}
- self.UTILITIES = new_settings.UTILITIES or {}
- self.EMBEDDINGS = new_settings.EMBEDDINGS or {}
- self.VECTORSTORES = new_settings.VECTORSTORES or {}
- self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {}
- self.RETRIEVERS = new_settings.RETRIEVERS or {}
- self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {}
- self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or []
- self.DEV = dev
+ self.components_path = new_settings.components_path or []
+ self.dev = dev
def update_settings(self, **kwargs):
logger.debug("Updating settings")
@@ -325,6 +295,3 @@ def load_settings_from_yaml(file_path: str) -> Settings:
logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")
return Settings(**settings_dict)
- return Settings(**settings_dict)
- return Settings(**settings_dict)
- return Settings(**settings_dict)
diff --git a/src/backend/base/langflow/services/settings/manager.py b/src/backend/base/langflow/services/settings/manager.py
index f81c3f0c5..d7d2184f3 100644
--- a/src/backend/base/langflow/services/settings/manager.py
+++ b/src/backend/base/langflow/services/settings/manager.py
@@ -35,10 +35,10 @@ class SettingsService(Service):
logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")
settings = Settings(**settings_dict)
- if not settings.CONFIG_DIR:
+ if not settings.config_dir:
raise ValueError("CONFIG_DIR must be set in settings")
auth_settings = AuthSettings(
- CONFIG_DIR=settings.CONFIG_DIR,
+ CONFIG_DIR=settings.config_dir,
)
return cls(settings, auth_settings)
diff --git a/src/backend/base/langflow/services/settings/service.py b/src/backend/base/langflow/services/settings/service.py
index 160c266ec..0d9d63bc4 100644
--- a/src/backend/base/langflow/services/settings/service.py
+++ b/src/backend/base/langflow/services/settings/service.py
@@ -1,10 +1,11 @@
import os
import yaml
+from loguru import logger
+
from langflow.services.base import Service
from langflow.services.settings.auth import AuthSettings
from langflow.services.settings.base import Settings
-from loguru import logger
class SettingsService(Service):
@@ -34,10 +35,10 @@ class SettingsService(Service):
logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")
settings = Settings(**settings_dict)
- if not settings.CONFIG_DIR:
+ if not settings.config_dir:
raise ValueError("CONFIG_DIR must be set in settings")
auth_settings = AuthSettings(
- CONFIG_DIR=settings.CONFIG_DIR,
+ CONFIG_DIR=settings.config_dir,
)
return cls(settings, auth_settings)
diff --git a/src/backend/base/langflow/services/storage/factory.py b/src/backend/base/langflow/services/storage/factory.py
index 1b2baf050..ae4783f1e 100644
--- a/src/backend/base/langflow/services/storage/factory.py
+++ b/src/backend/base/langflow/services/storage/factory.py
@@ -13,7 +13,7 @@ class StorageServiceFactory(ServiceFactory):
)
def create(self, session_service: SessionService, settings_service: SettingsService):
- storage_type = settings_service.settings.STORAGE_TYPE
+ storage_type = settings_service.settings.storage_type
if storage_type.lower() == "local":
from .local import LocalStorageService
diff --git a/src/backend/base/langflow/services/storage/local.py b/src/backend/base/langflow/services/storage/local.py
index 815059857..9ad9feafb 100644
--- a/src/backend/base/langflow/services/storage/local.py
+++ b/src/backend/base/langflow/services/storage/local.py
@@ -11,7 +11,7 @@ class LocalStorageService(StorageService):
def __init__(self, session_service, settings_service):
"""Initialize the local storage service with session and settings services."""
super().__init__(session_service, settings_service)
- self.data_dir = Path(settings_service.settings.CONFIG_DIR)
+ self.data_dir = Path(settings_service.settings.config_dir)
self.set_ready()
def build_full_path(self, flow_id: str, file_name: str) -> str:
diff --git a/src/backend/base/langflow/services/store/schema.py b/src/backend/base/langflow/services/store/schema.py
index 0fe89de18..0c37e1166 100644
--- a/src/backend/base/langflow/services/store/schema.py
+++ b/src/backend/base/langflow/services/store/schema.py
@@ -1,7 +1,7 @@
from typing import List, Optional
from uuid import UUID
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator
class TagResponse(BaseModel):
@@ -37,7 +37,8 @@ class ListComponentResponse(BaseModel):
private: Optional[bool] = None
# tags comes as a TagsIdResponse but we want to return a list of TagResponse
- @validator("tags", pre=True)
+ @field_validator("tags", mode="before")
+ @classmethod
def tags_to_list(cls, v):
# Check if all values are have id and name
# if so, return v else transform to TagResponse
diff --git a/src/backend/base/langflow/services/store/service.py b/src/backend/base/langflow/services/store/service.py
index 4fabd435c..a1b221b63 100644
--- a/src/backend/base/langflow/services/store/service.py
+++ b/src/backend/base/langflow/services/store/service.py
@@ -79,9 +79,9 @@ class StoreService(Service):
def __init__(self, settings_service: "SettingsService"):
self.settings_service = settings_service
- self.base_url = self.settings_service.settings.STORE_URL
- self.download_webhook_url = self.settings_service.settings.DOWNLOAD_WEBHOOK_URL
- self.like_webhook_url = self.settings_service.settings.LIKE_WEBHOOK_URL
+ self.base_url = self.settings_service.settings.store_url
+ self.download_webhook_url = self.settings_service.settings.download_webhook_url
+ self.like_webhook_url = self.settings_service.settings.like_webhook_url
self.components_url = f"{self.base_url}/items/components"
self.default_fields = [
"id",
diff --git a/src/backend/base/langflow/services/task/service.py b/src/backend/base/langflow/services/task/service.py
index 487b507cd..cca1645b8 100644
--- a/src/backend/base/langflow/services/task/service.py
+++ b/src/backend/base/langflow/services/task/service.py
@@ -29,7 +29,7 @@ class TaskService(Service):
def __init__(self, settings_service: "SettingsService"):
self.settings_service = settings_service
try:
- if self.settings_service.settings.CELERY_ENABLED:
+ if self.settings_service.settings.celery_enabled:
USE_CELERY = True
status = check_celery_availability()
diff --git a/src/backend/base/langflow/settings.py b/src/backend/base/langflow/settings.py
deleted file mode 100644
index 3f340df95..000000000
--- a/src/backend/base/langflow/settings.py
+++ /dev/null
@@ -1,166 +0,0 @@
-import contextlib
-import json
-import os
-from pathlib import Path
-from typing import List, Optional
-
-import yaml
-from pydantic import model_validator, validator
-from pydantic_settings import BaseSettings
-
-from langflow.utils.logger import logger
-
-BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
-
-
-class Settings(BaseSettings):
- CHAINS: dict = {}
- AGENTS: dict = {}
- PROMPTS: dict = {}
- LLMS: dict = {}
- TOOLS: dict = {}
- MEMORIES: dict = {}
- EMBEDDINGS: dict = {}
- VECTORSTORES: dict = {}
- DOCUMENTLOADERS: dict = {}
- WRAPPERS: dict = {}
- RETRIEVERS: dict = {}
- TOOLKITS: dict = {}
- TEXTSPLITTERS: dict = {}
- UTILITIES: dict = {}
- CUSTOM_COMPONENTS: dict = {}
-
- DEV: bool = False
- DATABASE_URL: Optional[str] = None
- CACHE: str = "InMemoryCache"
- REMOVE_API_KEYS: bool = False
- COMPONENTS_PATH: List[str] = []
-
- @validator("DATABASE_URL", pre=True)
- def set_database_url(cls, value):
- if not value:
- logger.debug("No database_url provided, trying LANGFLOW_DATABASE_URL env variable")
- if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"):
- value = langflow_database_url
- logger.debug("Using LANGFLOW_DATABASE_URL env variable.")
- else:
- logger.debug("No DATABASE_URL env variable, using sqlite database")
- value = "sqlite:///./langflow.db"
- return value
-
- @validator("COMPONENTS_PATH", pre=True)
- def set_components_path(cls, value):
- if os.getenv("LANGFLOW_COMPONENTS_PATH"):
- logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")
- langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH")
- if Path(langflow_component_path).exists() and langflow_component_path not in value:
- if isinstance(langflow_component_path, list):
- for path in langflow_component_path:
- if path not in value:
- value.append(path)
- logger.debug(f"Extending {langflow_component_path} to components_path")
- elif langflow_component_path not in value:
- value.append(langflow_component_path)
- logger.debug(f"Appending {langflow_component_path} to components_path")
-
- if not value:
- value = [BASE_COMPONENTS_PATH]
- logger.debug("Setting default components path to components_path")
- elif BASE_COMPONENTS_PATH not in value:
- value.append(BASE_COMPONENTS_PATH)
- logger.debug("Adding default components path to components_path")
-
- logger.debug(f"Components path: {value}")
- return value
-
- class Config:
- validate_assignment = True
- extra = "ignore"
- env_prefix = "LANGFLOW_"
-
- @model_validator(mode="after")
- def validate_lists(cls, values):
- for key, value in values.items():
- if key != "dev" and not value:
- values[key] = []
- return values
-
- def update_from_yaml(self, file_path: str, dev: bool = False):
- new_settings = load_settings_from_yaml(file_path)
- self.CHAINS = new_settings.CHAINS or {}
- self.AGENTS = new_settings.AGENTS or {}
- self.PROMPTS = new_settings.PROMPTS or {}
- self.LLMS = new_settings.LLMS or {}
- self.TOOLS = new_settings.TOOLS or {}
- self.MEMORIES = new_settings.MEMORIES or {}
- self.WRAPPERS = new_settings.WRAPPERS or {}
- self.TOOLKITS = new_settings.TOOLKITS or {}
- self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {}
- self.UTILITIES = new_settings.UTILITIES or {}
- self.EMBEDDINGS = new_settings.EMBEDDINGS or {}
- self.VECTORSTORES = new_settings.VECTORSTORES or {}
- self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {}
- self.RETRIEVERS = new_settings.RETRIEVERS or {}
- self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {}
- self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or []
- self.DEV = dev
-
- def update_settings(self, **kwargs):
- logger.debug("Updating settings")
- for key, value in kwargs.items():
- # value may contain sensitive information, so we don't want to log it
- if not hasattr(self, key):
- logger.debug(f"Key {key} not found in settings")
- continue
- logger.debug(f"Updating {key}")
- if isinstance(getattr(self, key), list):
- # value might be a '[something]' string
- with contextlib.suppress(json.decoder.JSONDecodeError):
- value = json.loads(str(value))
- if isinstance(value, list):
- for item in value:
- if isinstance(item, Path):
- item = str(item)
- if item not in getattr(self, key):
- getattr(self, key).append(item)
- logger.debug(f"Extended {key}")
- else:
- if isinstance(value, Path):
- value = str(value)
- if value not in getattr(self, key):
- getattr(self, key).append(value)
- logger.debug(f"Appended {key}")
-
- else:
- setattr(self, key, value)
- logger.debug(f"Updated {key}")
- logger.debug(f"{key}: {getattr(self, key)}")
-
-
-def save_settings_to_yaml(settings: Settings, file_path: str):
- with open(file_path, "w") as f:
- settings_dict = settings.model_dump()
- yaml.dump(settings_dict, f)
-
-
-def load_settings_from_yaml(file_path: str) -> Settings:
- # Check if a string is a valid path or a file name
- if "/" not in file_path:
- # Get current path
- current_path = os.path.dirname(os.path.abspath(__file__))
-
- file_path = os.path.join(current_path, file_path)
-
- with open(file_path, "r") as f:
- settings_dict = yaml.safe_load(f)
- settings_dict = {k.upper(): v for k, v in settings_dict.items()}
-
- for key in settings_dict:
- if key not in Settings.model_fields.keys():
- raise KeyError(f"Key {key} not found in settings")
- logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")
-
- return Settings(**settings_dict)
-
-
-settings = load_settings_from_yaml("config.yaml")
diff --git a/src/backend/base/langflow/template/frontend_node/__init__.py b/src/backend/base/langflow/template/frontend_node/__init__.py
index ceb2e0cb9..98c6fdb01 100644
--- a/src/backend/base/langflow/template/frontend_node/__init__.py
+++ b/src/backend/base/langflow/template/frontend_node/__init__.py
@@ -1,29 +1,6 @@
-from langflow.template.frontend_node import (
- agents,
- chains,
- custom_components,
- documentloaders,
- embeddings,
- llms,
- memories,
- prompts,
- textsplitters,
- tools,
- vectorstores,
- base,
-)
+from langflow.template.frontend_node import base, custom_components
__all__ = [
- "agents",
"base",
- "chains",
- "embeddings",
- "memories",
- "tools",
- "llms",
- "prompts",
- "vectorstores",
- "documentloaders",
- "textsplitters",
"custom_components",
]
diff --git a/src/backend/base/langflow/template/frontend_node/agents.py b/src/backend/base/langflow/template/frontend_node/agents.py
deleted file mode 100644
index 0993c1736..000000000
--- a/src/backend/base/langflow/template/frontend_node/agents.py
+++ /dev/null
@@ -1,172 +0,0 @@
-from typing import Optional
-
-from langchain.agents import types
-
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.template.base import Template
-
-NON_CHAT_AGENTS = {
- agent_type: agent_class
- for agent_type, agent_class in types.AGENT_TO_CLASS.items()
- if "chat" not in agent_type.value
-}
-
-
-class AgentFrontendNode(FrontendNode):
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- if field.name in ["suffix", "prefix"]:
- field.show = True
- if field.name == "Tools" and name == "ZeroShotAgent":
- field.field_type = "BaseTool"
- field.is_list = True
-
-
-class SQLAgentNode(FrontendNode):
- name: str = "SQLAgent"
- template: Template = Template(
- type_name="sql_agent",
- fields=[
- TemplateField(
- field_type="str", # pyright: ignore
- required=True,
- placeholder="",
- is_list=False, # pyright: ignore
- show=True,
- multiline=False,
- value="",
- name="database_uri",
- ),
- TemplateField(
- field_type="BaseLanguageModel", # pyright: ignore
- required=True,
- show=True,
- name="llm",
- display_name="LLM",
- ),
- ],
- )
- description: str = """Construct an SQL agent from an LLM and tools."""
- base_classes: list[str] = ["AgentExecutor"]
-
-
-class VectorStoreRouterAgentNode(FrontendNode):
- name: str = "VectorStoreRouterAgent"
- template: Template = Template(
- type_name="vectorstorerouter_agent",
- fields=[
- TemplateField(
- field_type="VectorStoreRouterToolkit", # pyright: ignore
- required=True,
- show=True,
- name="vectorstoreroutertoolkit",
- display_name="Vector Store Router Toolkit",
- ),
- TemplateField(
- field_type="BaseLanguageModel", # pyright: ignore
- required=True,
- show=True,
- name="llm",
- display_name="LLM",
- ),
- ],
- )
- description: str = """Construct an agent from a Vector Store Router."""
- base_classes: list[str] = ["AgentExecutor"]
-
-
-class VectorStoreAgentNode(FrontendNode):
- name: str = "VectorStoreAgent"
- template: Template = Template(
- type_name="vectorstore_agent",
- fields=[
- TemplateField(
- field_type="VectorStoreInfo", # pyright: ignore
- required=True,
- show=True,
- name="vectorstoreinfo",
- display_name="Vector Store Info",
- ),
- TemplateField(
- field_type="BaseLanguageModel", # pyright: ignore
- required=True,
- show=True,
- name="llm",
- display_name="LLM",
- ),
- ],
- )
- description: str = """Construct an agent from a Vector Store."""
- base_classes: list[str] = ["AgentExecutor"]
-
-
-class SQLDatabaseNode(FrontendNode):
- name: str = "SQLDatabase"
- template: Template = Template(
- type_name="sql_database",
- fields=[
- TemplateField(
- field_type="str", # pyright: ignore
- required=True,
- is_list=False, # pyright: ignore
- show=True,
- multiline=False,
- value="",
- name="uri",
- ),
- ],
- )
- description: str = """SQLAlchemy wrapper around a database."""
- base_classes: list[str] = ["SQLDatabase"]
-
-
-class CSVAgentNode(FrontendNode):
- name: str = "CSVAgent"
- template: Template = Template(
- type_name="csv_agent",
- fields=[
- TemplateField(
- field_type="file", # pyright: ignore
- required=True,
- show=True,
- name="path",
- value="",
- file_types=[".csv"], # pyright: ignore
- ),
- TemplateField(
- field_type="BaseLanguageModel", # pyright: ignore
- required=True,
- show=True,
- name="llm",
- display_name="LLM",
- ),
- ],
- )
- description: str = """Construct a CSV agent from a CSV and tools."""
- base_classes: list[str] = ["AgentExecutor"]
-
-
-class JsonAgentNode(FrontendNode):
- name: str = "JsonAgent"
- template: Template = Template(
- type_name="json_agent",
- fields=[
- TemplateField(
- field_type="BaseToolkit", # pyright: ignore
- required=True,
- show=True,
- name="toolkit",
- ),
- TemplateField(
- field_type="BaseLanguageModel", # pyright: ignore
- required=True,
- show=True,
- name="llm",
- display_name="LLM",
- ),
- ],
- )
- description: str = """Construct a json agent from an LLM and tools."""
- base_classes: list[str] = ["AgentExecutor"]
diff --git a/src/backend/base/langflow/template/frontend_node/chains.py b/src/backend/base/langflow/template/frontend_node/chains.py
deleted file mode 100644
index 4ce23a316..000000000
--- a/src/backend/base/langflow/template/frontend_node/chains.py
+++ /dev/null
@@ -1,265 +0,0 @@
-from typing import Optional
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.frontend_node.constants import QA_CHAIN_TYPES
-from langflow.template.template.base import Template
-
-
-class ChainFrontendNode(FrontendNode):
- output_type: str = "Chain"
-
- def add_extra_base_classes(self) -> None:
- self.base_classes.append("Text")
-
- def add_extra_fields(self) -> None:
- if self.template.type_name == "ConversationalRetrievalChain":
- # add memory
- self.template.add_field(
- TemplateField(
- field_type="BaseChatMemory",
- required=True,
- show=True,
- name="memory",
- advanced=False,
- )
- )
- # add return_source_documents
- self.template.add_field(
- TemplateField(
- field_type="bool",
- required=False,
- show=True,
- name="return_source_documents",
- advanced=False,
- value=True,
- display_name="Return source documents",
- )
- )
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=True,
- is_list=True,
- show=True,
- multiline=False,
- options=QA_CHAIN_TYPES,
- value=QA_CHAIN_TYPES[0],
- name="chain_type",
- advanced=False,
- )
- )
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
-
- if "name" == "RetrievalQA" and field.name == "memory":
- field.show = False
- field.required = False
-
- field.advanced = False
- if "key" in str(field.name):
- field.password = False
- field.show = False
- if field.name in ["input_key", "output_key"]:
- field.required = True
- field.show = True
- field.advanced = True
-
- # We should think of a way to deal with this later
- # if field.field_type == "PromptTemplate":
- # field.field_type = "str"
- # field.multiline = True
- # field.show = True
- # field.advanced = False
- # field.value = field.value.template
-
- # Separated for possible future changes
- if field.name == "prompt" and field.value is None:
- field.required = True
- field.show = True
- field.advanced = False
- if field.name == "memory":
- # field.required = False
- field.show = True
- field.advanced = False
- if field.name == "verbose":
- field.required = False
- field.show = False
- field.advanced = True
- if field.name == "llm":
- field.required = True
- field.show = True
- field.advanced = False
- field.field_type = "BaseLanguageModel"
- field.is_list = False
-
- if field.name == "return_source_documents":
- field.required = False
- field.show = True
- field.advanced = True
- field.value = True
-
-
-class SeriesCharacterChainNode(FrontendNode):
- output_type: str = "Chain"
- name: str = "SeriesCharacterChain"
- template: Template = Template(
- type_name="SeriesCharacterChain",
- fields=[
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- advanced=False,
- multiline=False,
- name="character",
- ),
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- advanced=False,
- multiline=False,
- name="series",
- ),
- TemplateField(
- field_type="BaseLanguageModel",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- advanced=False,
- multiline=False,
- name="llm",
- display_name="LLM",
- ),
- ],
- )
- description: str = (
- "SeriesCharacterChain is a chain you can use to have a conversation with a character from a series." # noqa
- )
- base_classes: list[str] = [
- "LLMChain",
- "BaseCustomChain",
- "Chain",
- "ConversationChain",
- "SeriesCharacterChain",
- "function",
- ]
-
-
-class TimeTravelGuideChainNode(FrontendNode):
- output_type: str = "Chain"
- name: str = "TimeTravelGuideChain"
- template: Template = Template(
- type_name="TimeTravelGuideChain",
- fields=[
- TemplateField(
- field_type="BaseLanguageModel",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- advanced=False,
- multiline=False,
- name="llm",
- display_name="LLM",
- ),
- TemplateField(
- field_type="BaseChatMemory",
- required=False,
- show=True,
- name="memory",
- advanced=False,
- ),
- ],
- )
- description: str = "Time travel guide chain."
- base_classes: list[str] = [
- "LLMChain",
- "BaseCustomChain",
- "TimeTravelGuideChain",
- "Chain",
- "ConversationChain",
- ]
-
-
-class MidJourneyPromptChainNode(FrontendNode):
- output_type: str = "Chain"
- name: str = "MidJourneyPromptChain"
- template: Template = Template(
- type_name="MidJourneyPromptChain",
- fields=[
- TemplateField(
- field_type="BaseLanguageModel",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- advanced=False,
- multiline=False,
- name="llm",
- display_name="LLM",
- ),
- TemplateField(
- field_type="BaseChatMemory",
- required=False,
- show=True,
- name="memory",
- advanced=False,
- ),
- ],
- )
- description: str = "MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts."
- base_classes: list[str] = [
- "LLMChain",
- "BaseCustomChain",
- "Chain",
- "ConversationChain",
- "MidJourneyPromptChain",
- ]
-
-
-class CombineDocsChainNode(FrontendNode):
- output_type: str = "Chain"
- name: str = "CombineDocsChain"
- template: Template = Template(
- type_name="load_qa_chain",
- fields=[
- TemplateField(
- field_type="str",
- required=True,
- is_list=True,
- show=True,
- multiline=False,
- options=QA_CHAIN_TYPES,
- value=QA_CHAIN_TYPES[0],
- name="chain_type",
- advanced=False,
- ),
- TemplateField(
- field_type="BaseLanguageModel",
- required=True,
- show=True,
- name="llm",
- display_name="LLM",
- advanced=False,
- ),
- ],
- )
- description: str = """Load question answering chain."""
- base_classes: list[str] = ["BaseCombineDocumentsChain", "function"]
-
- def to_dict(self):
- return super().to_dict()
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- # do nothing and don't return anything
- pass
diff --git a/src/backend/base/langflow/template/frontend_node/custom_components.py b/src/backend/base/langflow/template/frontend_node/custom_components.py
index 577c89684..932d30799 100644
--- a/src/backend/base/langflow/template/frontend_node/custom_components.py
+++ b/src/backend/base/langflow/template/frontend_node/custom_components.py
@@ -4,7 +4,7 @@ from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.template.base import Template
-DEFAULT_CUSTOM_COMPONENT_CODE = """from langflow.interface.custom.custom_component import CustomComponent
+DEFAULT_CUSTOM_COMPONENT_CODE = """from langflow.custom import CustomComponent
from typing import Optional, List, Dict, Union
from langflow.field_typing import (
diff --git a/src/backend/base/langflow/template/frontend_node/documentloaders.py b/src/backend/base/langflow/template/frontend_node/documentloaders.py
deleted file mode 100644
index 31e13894a..000000000
--- a/src/backend/base/langflow/template/frontend_node/documentloaders.py
+++ /dev/null
@@ -1,301 +0,0 @@
-from typing import ClassVar, Dict, Optional
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-
-
-def build_file_field(fileTypes: list, name: str = "file_path") -> TemplateField:
- """Build a template field for a document loader."""
- return TemplateField(
- field_type="file",
- required=True,
- show=True,
- name=name,
- value="",
- file_types=fileTypes,
- )
-
-
-class DocumentLoaderFrontNode(FrontendNode):
- def add_extra_base_classes(self) -> None:
- self.base_classes = ["Document"]
- self.output_types = ["Document"]
-
- file_path_templates: ClassVar[Dict] = {
- "AirbyteJSONLoader": build_file_field(
- fileTypes=[".json"],
- ),
- "CoNLLULoader": build_file_field(
- fileTypes=[".csv"],
- ),
- "CSVLoader": build_file_field(
- fileTypes=[".csv"],
- ),
- "UnstructuredEmailLoader": build_file_field(
- fileTypes=[".eml"],
- ),
- "EverNoteLoader": build_file_field(
- fileTypes=[".xml"],
- ),
- "FacebookChatLoader": build_file_field(
- fileTypes=[".json"],
- ),
- "BSHTMLLoader": build_file_field(
- fileTypes=[".html"],
- ),
- "UnstructuredHTMLLoader": build_file_field(fileTypes=[".html"]),
- "UnstructuredImageLoader": build_file_field(
- fileTypes=[".jpg", ".jpeg", ".png", ".gif", ".bmp"],
- ),
- "UnstructuredMarkdownLoader": build_file_field(
- fileTypes=[".md"],
- ),
- "PyPDFLoader": build_file_field(
- fileTypes=[".pdf"],
- ),
- "UnstructuredPowerPointLoader": build_file_field(
- fileTypes=[".pptx", ".ppt"],
- ),
- "SRTLoader": build_file_field(
- fileTypes=[".srt"],
- ),
- "TelegramChatLoader": build_file_field(
- fileTypes=[".json"],
- ),
- "TextLoader": build_file_field(
- fileTypes=[".txt"],
- ),
- "UnstructuredWordDocumentLoader": build_file_field(
- fileTypes=[".docx", ".doc"],
- ),
- }
-
- def add_extra_fields(self) -> None:
- name = None
- display_name = "Web Page"
- if self.template.type_name in {"GitLoader"}:
- # Add fields repo_path, clone_url, branch and file_filter
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=True,
- show=True,
- name="repo_path",
- value="",
- display_name="Path to repository",
- advanced=False,
- )
- )
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=False,
- show=True,
- name="clone_url",
- value="",
- display_name="Clone URL",
- advanced=False,
- )
- )
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=True,
- show=True,
- name="branch",
- value="",
- display_name="Branch",
- advanced=False,
- )
- )
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=False,
- show=True,
- name="file_filter",
- value="",
- display_name="File extensions (comma-separated)",
- advanced=False,
- )
- )
- elif self.template.type_name in {"SlackDirectoryLoader"}:
- self.template.add_field(
- TemplateField(
- field_type="file",
- required=True,
- show=True,
- name="zip_path",
- value="",
- display_name="Path to zip file",
- file_types=[".zip"],
- )
- )
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=False,
- show=True,
- name="workspace_url",
- value="",
- display_name="Workspace URL",
- advanced=False,
- )
- )
- elif self.template.type_name in self.file_path_templates:
- self.template.add_field(self.file_path_templates[self.template.type_name])
- elif self.template.type_name in {
- "WebBaseLoader",
- "AZLyricsLoader",
- "CollegeConfidentialLoader",
- "HNLoader",
- "IFixitLoader",
- "IMSDbLoader",
- "GutenbergLoader",
- }:
- name = "web_path"
- elif self.template.type_name in {"GutenbergLoader"}:
- name = "file_path"
- elif self.template.type_name in {"GitbookLoader"}:
- name = "web_page"
- elif self.template.type_name in {
- "DirectoryLoader",
- "ReadTheDocsLoader",
- "NotionDirectoryLoader",
- "PyPDFDirectoryLoader",
- }:
- name = "path"
- display_name = "Local directory"
- if name:
- if self.template.type_name in {"DirectoryLoader"}:
- for field in build_directory_loader_fields():
- self.template.add_field(field)
- else:
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=True,
- show=True,
- name=name,
- value="",
- display_name=display_name,
- )
- )
- # add a metadata field of type dict
- self.template.add_field(
- TemplateField(
- field_type="dict",
- required=False,
- show=True,
- name="metadata",
- value={},
- display_name="Metadata",
- multiline=False,
- )
- )
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
- if field.name == "metadata":
- field.show = True
- field.advanced = False
- field.show = True
-
-
-def build_directory_loader_fields():
- # if loader_kwargs is None:
- # loader_kwargs = {}
- # self.path = path
- # self.glob = glob
- # self.load_hidden = load_hidden
- # self.loader_cls = loader_cls
- # self.loader_kwargs = loader_kwargs
- # self.silent_errors = silent_errors
- # self.recursive = recursive
- # self.show_progress = show_progress
- # self.use_multithreading = use_multithreading
- # self.max_concurrency = max_concurrency
- # Based on the above fields, we can build the following fields:
- # path, glob, load_hidden, silent_errors, recursive, show_progress, use_multithreading, max_concurrency
- # path
- path = TemplateField(
- field_type="str",
- required=True,
- show=True,
- name="path",
- value="",
- display_name="Local directory",
- advanced=False,
- )
- # glob
- glob = TemplateField(
- field_type="str",
- required=True,
- show=True,
- name="glob",
- value="**/*.txt",
- display_name="glob",
- advanced=False,
- )
- # load_hidden
- load_hidden = TemplateField(
- field_type="bool",
- required=False,
- show=True,
- name="load_hidden",
- value="False",
- display_name="Load hidden files",
- advanced=True,
- )
- # silent_errors
- silent_errors = TemplateField(
- field_type="bool",
- required=False,
- show=True,
- name="silent_errors",
- value="False",
- display_name="Silent errors",
- advanced=True,
- )
- # recursive
- recursive = TemplateField(
- field_type="bool",
- required=False,
- show=True,
- name="recursive",
- value="True",
- display_name="Recursive",
- advanced=True,
- )
-
- # use_multithreading
- use_multithreading = TemplateField(
- field_type="bool",
- required=False,
- show=True,
- name="use_multithreading",
- value="True",
- display_name="Use multithreading",
- advanced=True,
- )
- # max_concurrency
- max_concurrency = TemplateField(
- field_type="int",
- required=False,
- show=True,
- name="max_concurrency",
- value=10,
- display_name="Max concurrency",
- advanced=True,
- )
-
- return (
- path,
- glob,
- load_hidden,
- silent_errors,
- recursive,
- use_multithreading,
- max_concurrency,
- )
diff --git a/src/backend/base/langflow/template/frontend_node/embeddings.py b/src/backend/base/langflow/template/frontend_node/embeddings.py
deleted file mode 100644
index a2974487e..000000000
--- a/src/backend/base/langflow/template/frontend_node/embeddings.py
+++ /dev/null
@@ -1,119 +0,0 @@
-from typing import Optional
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-
-
-class EmbeddingFrontendNode(FrontendNode):
- def add_extra_fields(self) -> None:
- if "VertexAI" in self.template.type_name:
- # Add credentials field which should of type file.
- self.template.add_field(
- TemplateField(
- field_type="file",
- required=False,
- show=True,
- name="credentials",
- value="",
- file_types=[".json"],
- )
- )
-
- @staticmethod
- def format_vertex_field(field: TemplateField, name: str):
- if "VertexAI" in name:
- key = field.name or ""
- advanced_fields = [
- "verbose",
- "top_p",
- "top_k",
- "max_output_tokens",
- ]
- if key in advanced_fields:
- field.advanced = True
- show_fields = [
- "verbose",
- "project",
- "location",
- "credentials",
- "max_output_tokens",
- "model_name",
- "temperature",
- "top_p",
- "top_k",
- ]
-
- if key in show_fields:
- field.show = True
-
- @staticmethod
- def format_jina_fields(field: TemplateField):
- name = field.name or ""
- if "jina" in name:
- field.show = True
- field.advanced = False
-
- if "auth" in name or "token" in name:
- field.password = True
- field.show = True
- field.advanced = False
-
- if name == "jina_api_url":
- field.show = True
- field.advanced = True
- field.display_name = "Jina API URL"
- field.password = False
-
- @staticmethod
- def format_openai_fields(field: TemplateField):
- name = field.name or ""
- if "openai" in name:
- field.show = True
- field.advanced = True
- split_name = name.split("_")
- title_name = " ".join([s.capitalize() for s in split_name])
- field.display_name = title_name.replace("Openai", "OpenAI").replace("Api", "API")
-
- if "api_key" in name:
- field.password = True
- field.show = True
- field.advanced = False
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
- if name and "vertex" in name.lower():
- EmbeddingFrontendNode.format_vertex_field(field, name)
- field.advanced = not field.required
- field.show = True
- key = field.name or ""
- if key == "headers":
- field.show = False
- if key == "model_kwargs":
- field.field_type = "dict"
- field.advanced = True
- field.show = True
- elif key in [
- "model_name",
- "temperature",
- "model_file",
- "model_type",
- "deployment_name",
- "credentials",
- ]:
- field.advanced = False
- field.show = True
- if key == "credentials":
- field.field_type = "file"
- if name == "VertexAI" and key not in [
- "callbacks",
- "client",
- "stop",
- "tags",
- "cache",
- ]:
- field.show = True
-
- # Format Jina fields
- EmbeddingFrontendNode.format_jina_fields(field)
- EmbeddingFrontendNode.format_openai_fields(field)
diff --git a/src/backend/base/langflow/template/frontend_node/llms.py b/src/backend/base/langflow/template/frontend_node/llms.py
deleted file mode 100644
index 7bf5a8cb6..000000000
--- a/src/backend/base/langflow/template/frontend_node/llms.py
+++ /dev/null
@@ -1,154 +0,0 @@
-from typing import Optional
-
-from langflow.services.database.models.base import orjson_dumps
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.frontend_node.constants import CTRANSFORMERS_DEFAULT_CONFIG, OPENAI_API_BASE_INFO
-
-
-class LLMFrontendNode(FrontendNode):
- def add_extra_fields(self) -> None:
- if "VertexAI" in self.template.type_name:
- # Add credentials field which should of type file.
- self.template.add_field(
- TemplateField(
- field_type="file",
- required=False,
- show=True,
- name="credentials",
- value="",
- file_types=[".json"],
- )
- )
-
- @staticmethod
- def format_vertex_field(field: TemplateField, name: str):
- key = field.name or ""
- if "VertexAI" in name:
- advanced_fields = [
- "tuned_model_name",
- "verbose",
- "top_p",
- "top_k",
- "max_output_tokens",
- ]
- if key in advanced_fields:
- field.advanced = True
- show_fields = [
- "tuned_model_name",
- "verbose",
- "project",
- "location",
- "credentials",
- "max_output_tokens",
- "model_name",
- "temperature",
- "top_p",
- "top_k",
- ]
-
- if key in show_fields:
- field.show = True
-
- @staticmethod
- def format_openai_field(field: TemplateField):
- key = field.name or ""
- if "openai" in key.lower():
- field.display_name = (key.title().replace("Openai", "OpenAI").replace("_", " ")).replace("Api", "API")
-
- if "key" not in key.lower() and "token" not in key.lower():
- field.password = False
-
- if key == "openai_api_base":
- field.info = OPENAI_API_BASE_INFO
-
- def add_extra_base_classes(self) -> None:
- if "BaseLanguageModel" not in self.base_classes:
- self.base_classes.append("BaseLanguageModel")
-
- @staticmethod
- def format_azure_field(field: TemplateField):
- key = field.name or ""
- if key == "model_name":
- field.show = False # Azure uses deployment_name instead of model_name.
- elif key == "openai_api_type":
- field.show = False
- field.password = False
- field.value = "azure"
- elif key == "openai_api_version":
- field.password = False
-
- @staticmethod
- def format_llama_field(field: TemplateField):
- field.show = True
- field.advanced = not field.required
-
- @staticmethod
- def format_ctransformers_field(field: TemplateField):
- key = field.name or ""
- if key == "config":
- field.show = True
- field.advanced = True
- field.value = orjson_dumps(CTRANSFORMERS_DEFAULT_CONFIG, indent_2=True)
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- display_names_dict = {
- "huggingfacehub_api_token": "HuggingFace Hub API Token",
- }
- FrontendNode.format_field(field, name)
- LLMFrontendNode.format_openai_field(field)
- LLMFrontendNode.format_ctransformers_field(field)
- if name and "azure" in name.lower():
- LLMFrontendNode.format_azure_field(field)
- if name and "llama" in name.lower():
- LLMFrontendNode.format_llama_field(field)
- if name and "vertex" in name.lower():
- LLMFrontendNode.format_vertex_field(field, name)
- SHOW_FIELDS = ["repo_id"]
- key = field.name or ""
- if key in SHOW_FIELDS:
- field.show = True
-
- if "api" in key and ("key" in key or ("token" in key and "tokens" not in key)):
- field.password = True
- field.show = True
- # Required should be False to support
- # loading the API key from environment variables
- field.required = False
- field.advanced = False
-
- if key == "task":
- field.required = True
- field.show = True
- field.is_list = True
- field.options = ["text-generation", "text2text-generation", "summarization"]
- field.value = field.options[0]
- field.advanced = True
-
- if display_name := display_names_dict.get(key):
- field.display_name = display_name
- if key == "model_kwargs":
- field.field_type = "dict"
- field.advanced = True
- field.show = True
- elif key in [
- "model_name",
- "temperature",
- "model_file",
- "model_type",
- "deployment_name",
- "credentials",
- ]:
- field.advanced = False
- field.show = True
- if key == "credentials":
- field.field_type = "file"
- if name == "VertexAI" and key not in [
- "callbacks",
- "client",
- "stop",
- "tags",
- "cache",
- ]:
- field.show = True
diff --git a/src/backend/base/langflow/template/frontend_node/memories.py b/src/backend/base/langflow/template/frontend_node/memories.py
deleted file mode 100644
index 1cdc8febb..000000000
--- a/src/backend/base/langflow/template/frontend_node/memories.py
+++ /dev/null
@@ -1,190 +0,0 @@
-from typing import Optional
-
-from langchain_community.chat_message_histories.mongodb import DEFAULT_COLLECTION_NAME, DEFAULT_DBNAME
-from langchain_community.chat_message_histories.postgres import DEFAULT_CONNECTION_STRING
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.frontend_node.constants import INPUT_KEY_INFO, OUTPUT_KEY_INFO
-from langflow.template.template.base import Template
-
-
-class MemoryFrontendNode(FrontendNode):
- frozen: bool = True
-
- def add_extra_fields(self) -> None:
- # chat history should have another way to add common field?
- # prevent adding incorect field in ChatMessageHistory
- base_message_classes = ["BaseEntityStore", "BaseChatMessageHistory"]
- if any(base_class in self.base_classes for base_class in base_message_classes):
- return
-
- # add return_messages field
- self.template.add_field(
- TemplateField(
- field_type="bool",
- required=False,
- show=True,
- name="return_messages",
- advanced=False,
- value=False,
- )
- )
- # add input_key and output_key str fields
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=False,
- show=True,
- name="input_key",
- advanced=True,
- value="",
- )
- )
- if self.template.type_name not in {"VectorStoreRetrieverMemory"}:
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=False,
- show=True,
- name="output_key",
- advanced=True,
- value="",
- )
- )
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
-
- if not isinstance(field.value, str):
- field.value = None
- if field.name == "k":
- field.required = True
- field.show = True
- field.field_type = "int"
- field.value = 10
- field.display_name = "Memory Size"
- field.password = False
- if field.name == "return_messages":
- field.required = False
- field.show = True
- field.advanced = False
- if field.name in {"input_key", "output_key"}:
- field.required = False
- field.show = True
- field.advanced = False
- field.value = ""
- field.info = INPUT_KEY_INFO if field.name == "input_key" else OUTPUT_KEY_INFO
-
- if field.name == "memory_key":
- field.value = "chat_history"
- if field.name == "chat_memory":
- field.show = True
- field.advanced = False
- field.required = False
- if field.name == "url":
- field.show = True
- if field.name == "entity_store":
- field.show = False
- if name == "ConversationEntityMemory" and field.name == "memory_key":
- field.show = False
- field.required = False
-
- if name == "MotorheadMemory":
- if field.name == "chat_memory":
- field.show = False
- field.required = False
- elif field.name == "client_id":
- field.show = True
- field.advanced = False
-
-
-class PostgresChatMessageHistoryFrontendNode(MemoryFrontendNode):
- name: str = "PostgresChatMessageHistory"
- template: Template = Template(
- type_name="PostgresChatMessageHistory",
- fields=[
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- name="session_id",
- ),
- TemplateField(
- field_type="str",
- required=True,
- show=True,
- name="connection_string",
- value=DEFAULT_CONNECTION_STRING,
- ),
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- value="message_store",
- name="table_name",
- ),
- ],
- )
- description: str = "Memory store with Postgres"
- base_classes: list[str] = ["PostgresChatMessageHistory", "BaseChatMessageHistory"]
-
-
-class MongoDBChatMessageHistoryFrontendNode(MemoryFrontendNode):
- name: str = "MongoDBChatMessageHistory"
- template: Template = Template(
- # langchain/memory/chat_message_histories/mongodb.py
- # connection_string: str,
- # session_id: str,
- # database_name: str = DEFAULT_DBNAME,
- # collection_name: str = DEFAULT_COLLECTION_NAME,
- type_name="MongoDBChatMessageHistory",
- fields=[
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- name="session_id",
- ),
- TemplateField(
- field_type="str",
- required=True,
- show=True,
- name="connection_string",
- value="",
- info="MongoDB connection string (e.g mongodb://mongo_user:password123@mongo:27017)",
- ),
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- value=DEFAULT_DBNAME,
- name="database_name",
- ),
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- value=DEFAULT_COLLECTION_NAME,
- name="collection_name",
- ),
- ],
- )
- description: str = "Memory store with MongoDB"
- base_classes: list[str] = ["MongoDBChatMessageHistory", "BaseChatMessageHistory"]
diff --git a/src/backend/base/langflow/template/frontend_node/prompts.py b/src/backend/base/langflow/template/frontend_node/prompts.py
deleted file mode 100644
index 03445f753..000000000
--- a/src/backend/base/langflow/template/frontend_node/prompts.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from typing import Optional
-
-from langchain.agents.mrkl import prompt
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.frontend_node.constants import DEFAULT_PROMPT, HUMAN_PROMPT, SYSTEM_PROMPT
-from langflow.template.template.base import Template
-
-
-class PromptFrontendNode(FrontendNode):
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
- # if field.field_type == "StringPromptTemplate"
- # change it to str
- PROMPT_FIELDS = [
- "template",
- "suffix",
- "prefix",
- "examples",
- "format_instructions",
- ]
- key = field.name or ""
- if field.field_type == "StringPromptTemplate" and "Message" in str(name):
- field.field_type = "prompt"
- field.multiline = True
- field.value = HUMAN_PROMPT if "Human" in key else SYSTEM_PROMPT
- if key == "template" and field.value == "":
- field.value = DEFAULT_PROMPT
-
- if key and key in PROMPT_FIELDS:
- field.field_type = "prompt"
- field.advanced = False
-
- if "Union" in field.field_type and "BaseMessagePromptTemplate" in field.field_type:
- field.field_type = "BaseMessagePromptTemplate"
-
- # All prompt fields should be password=False
- field.password = False
- field.dynamic = True
-
-
-class PromptTemplateNode(FrontendNode):
- name: str = "PromptTemplate"
- template: Template
- description: str
- base_classes: list[str] = ["BasePromptTemplate"]
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
-
- if (field.name or "") == "examples":
- field.advanced = False
-
-
-class BasePromptFrontendNode(FrontendNode):
- name: str
- template: Template
- description: str
- base_classes: list[str]
-
-
-class ZeroShotPromptNode(BasePromptFrontendNode):
- name: str = "ZeroShotPrompt"
- template: Template = Template(
- type_name="ZeroShotPrompt",
- fields=[
- TemplateField(
- field_type="str",
- required=False,
- placeholder="",
- is_list=False,
- show=True,
- multiline=True,
- value=prompt.PREFIX,
- name="prefix",
- ),
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=True,
- value=prompt.FORMAT_INSTRUCTIONS,
- name="format_instructions",
- ),
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=True,
- value=prompt.SUFFIX,
- name="suffix",
- ),
- ],
- )
- description: str = "Prompt template for Zero Shot Agent."
- base_classes: list[str] = ["BasePromptTemplate"]
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- PromptFrontendNode.format_field(field, name)
diff --git a/src/backend/base/langflow/template/frontend_node/retrievers.py b/src/backend/base/langflow/template/frontend_node/retrievers.py
deleted file mode 100644
index b482c8b84..000000000
--- a/src/backend/base/langflow/template/frontend_node/retrievers.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from typing import Optional
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-
-
-class RetrieverFrontendNode(FrontendNode):
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
- # Define common field attributes
- field.show = True
- if field.name == "parser_key":
- field.display_name = "Parser Key"
- field.password = False
diff --git a/src/backend/base/langflow/template/frontend_node/textsplitters.py b/src/backend/base/langflow/template/frontend_node/textsplitters.py
deleted file mode 100644
index eb302e996..000000000
--- a/src/backend/base/langflow/template/frontend_node/textsplitters.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from langchain.text_splitter import Language
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-
-
-class TextSplittersFrontendNode(FrontendNode):
- def add_extra_base_classes(self) -> None:
- self.base_classes = ["Document"]
- self.output_types = ["Document"]
-
- def add_extra_fields(self) -> None:
- self.template.add_field(
- TemplateField(
- field_type="Document",
- required=True,
- show=True,
- name="documents",
- is_list=True,
- )
- )
- name = "separator"
- if self.template.type_name == "CharacterTextSplitter":
- name = "separator"
- elif self.template.type_name == "RecursiveCharacterTextSplitter":
- name = "separators"
- # Add a field for type of separator
- # which will have Text or any value from the
- # Language enum
- options = [x.value for x in Language] + ["Text"]
- options.sort()
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=True,
- show=True,
- name="separator_type",
- advanced=False,
- is_list=True,
- options=options,
- value="Text",
- display_name="Separator Type",
- )
- )
- self.template.add_field(
- TemplateField(
- field_type="str",
- required=True,
- show=True,
- value="\\n",
- name=name,
- display_name="Separator",
- )
- )
- self.template.add_field(
- TemplateField(
- field_type="int",
- required=True,
- show=True,
- value=1000,
- name="chunk_size",
- display_name="Chunk Size",
- )
- )
- self.template.add_field(
- TemplateField(
- field_type="int",
- required=True,
- show=True,
- value=200,
- name="chunk_overlap",
- display_name="Chunk Overlap",
- )
- )
diff --git a/src/backend/base/langflow/template/frontend_node/tools.py b/src/backend/base/langflow/template/frontend_node/tools.py
deleted file mode 100644
index 5bed90c05..000000000
--- a/src/backend/base/langflow/template/frontend_node/tools.py
+++ /dev/null
@@ -1,130 +0,0 @@
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-from langflow.template.template.base import Template
-from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION
-
-
-class ToolNode(FrontendNode):
- name: str = "Tool"
- template: Template = Template(
- type_name="Tool",
- fields=[
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=True,
- value="",
- name="name",
- advanced=False,
- ),
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=True,
- value="",
- name="description",
- advanced=False,
- ),
- TemplateField(
- name="func",
- field_type="Callable",
- required=True,
- is_list=False,
- show=True,
- multiline=True,
- advanced=False,
- ),
- TemplateField(
- field_type="bool",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- value=False,
- name="return_direct",
- ),
- ],
- )
- description: str = "Converts a chain, agent or function into a tool."
- base_classes: list[str] = ["Tool", "BaseTool"]
-
-
-class PythonFunctionToolNode(FrontendNode):
- name: str = "PythonFunctionTool"
- template: Template = Template(
- type_name="PythonFunctionTool",
- fields=[
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- value="",
- name="name",
- advanced=False,
- ),
- TemplateField(
- field_type="str",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- value="",
- name="description",
- advanced=False,
- ),
- TemplateField(
- field_type="code",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- value=DEFAULT_PYTHON_FUNCTION,
- name="code",
- advanced=False,
- ),
- TemplateField(
- field_type="bool",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- multiline=False,
- value=False,
- name="return_direct",
- ),
- ],
- )
- description: str = "Python function to be executed."
- base_classes: list[str] = ["BaseTool", "Tool"]
-
-
-class PythonFunctionNode(FrontendNode):
- name: str = "PythonFunction"
- template: Template = Template(
- type_name="PythonFunction",
- fields=[
- TemplateField(
- field_type="code",
- required=True,
- placeholder="",
- is_list=False,
- show=True,
- value=DEFAULT_PYTHON_FUNCTION,
- name="code",
- advanced=False,
- )
- ],
- )
- description: str = "Python function to be executed."
- base_classes: list[str] = ["Callable"]
diff --git a/src/backend/base/langflow/template/frontend_node/utilities.py b/src/backend/base/langflow/template/frontend_node/utilities.py
deleted file mode 100644
index 51849189c..000000000
--- a/src/backend/base/langflow/template/frontend_node/utilities.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import ast
-from typing import Optional
-
-from langflow.services.database.models.base import orjson_dumps
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-
-
-class UtilitiesFrontendNode(FrontendNode):
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
- # field.field_type could be "Literal['news', 'search', 'places', 'images']
- # we need to convert it to a list
- # It seems it could also be like "typing_extensions.['news', 'search', 'places', 'images']"
- if "Literal" in field.field_type:
- field_type = field.field_type.replace("typing_extensions.", "")
- field_type = field_type.replace("Literal", "")
- field.options = ast.literal_eval(field_type)
- field.is_list = True
- field.field_type = "str"
-
- if isinstance(field.value, dict):
- field.value = orjson_dumps(field.value)
diff --git a/src/backend/base/langflow/template/frontend_node/vectorstores.py b/src/backend/base/langflow/template/frontend_node/vectorstores.py
deleted file mode 100644
index 1f49f76a9..000000000
--- a/src/backend/base/langflow/template/frontend_node/vectorstores.py
+++ /dev/null
@@ -1,369 +0,0 @@
-from typing import List, Optional
-
-from langflow.template.field.base import TemplateField
-from langflow.template.frontend_node.base import FrontendNode
-
-BASIC_FIELDS = [
- "work_dir",
- "collection_name",
- "api_key",
- "location",
- "persist_directory",
- "persist",
- "weaviate_url",
- "es_url",
- "index_name",
- "namespace",
- "folder_path",
- "table_name",
- "query_name",
- "supabase_url",
- "supabase_service_key",
- "mongodb_atlas_cluster_uri",
- "collection_name",
- "db_name",
-]
-ADVANCED_FIELDS = [
- "n_dim",
- "key",
- "prefix",
- "distance_func",
- "content_payload_key",
- "metadata_payload_key",
- "timeout",
- "host",
- "path",
- "url",
- "port",
- "https",
- "prefer_grpc",
- "grpc_port",
- "pinecone_api_key",
- "pinecone_env",
- "client_kwargs",
- "search_kwargs",
- "chroma_server_host",
- "chroma_server_http_port",
- "chroma_server_ssl_enabled",
- "chroma_server_grpc_port",
- "chroma_server_cors_allow_origins",
-]
-
-
-class VectorStoreFrontendNode(FrontendNode):
- def add_extra_fields(self) -> None:
- extra_fields: List[TemplateField] = []
- # Add search_kwargs field
- extra_field = TemplateField(
- name="search_kwargs",
- field_type="NestedDict",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- value="{}",
- )
- extra_fields.append(extra_field)
- if self.template.type_name == "Weaviate":
- extra_field = TemplateField(
- name="weaviate_url",
- field_type="str",
- required=True,
- placeholder="http://localhost:8080",
- show=True,
- advanced=False,
- multiline=False,
- value="http://localhost:8080",
- )
- # Add client_kwargs field
- extra_field2 = TemplateField(
- name="client_kwargs",
- field_type="code",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- value="{}",
- )
- extra_fields.extend((extra_field, extra_field2))
-
- elif self.template.type_name == "Chroma":
- # New bool field for persist parameter
- chroma_fields = [
- TemplateField(
- name="persist",
- field_type="bool",
- required=False,
- show=True,
- advanced=False,
- value=False,
- display_name="Persist",
- ),
- # chroma_server_grpc_port: str | None = None,
- TemplateField(
- name="chroma_server_host",
- field_type="str",
- required=False,
- show=True,
- advanced=True,
- display_name="Chroma Server Host",
- ),
- TemplateField(
- name="chroma_server_http_port",
- field_type="str",
- required=False,
- show=True,
- advanced=True,
- display_name="Chroma Server HTTP Port",
- ),
- TemplateField(
- name="chroma_server_ssl_enabled",
- field_type="bool",
- required=False,
- show=True,
- advanced=True,
- value=False,
- display_name="Chroma Server SSL Enabled",
- ),
- TemplateField(
- name="chroma_server_grpc_port",
- field_type="str",
- required=False,
- show=True,
- advanced=True,
- display_name="Chroma Server GRPC Port",
- ),
- TemplateField(
- name="chroma_server_cors_allow_origins",
- field_type="str",
- required=False,
- is_list=True,
- show=True,
- advanced=True,
- display_name="Chroma Server CORS Allow Origins",
- ),
- ]
-
- extra_fields.extend(chroma_fields)
- elif self.template.type_name == "Pinecone":
- # add pinecone_api_key and pinecone_env
- extra_field = TemplateField(
- name="pinecone_api_key",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- password=True,
- value="",
- )
- extra_field2 = TemplateField(
- name="pinecone_env",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- value="",
- )
- extra_fields.extend((extra_field, extra_field2))
-
- elif self.template.type_name == "ElasticsearchStore":
- # add elastic and elastic credentials
- extra_field = TemplateField(
- name="es_url",
- field_type="str",
- required=True,
- placeholder="http://localhost:9200",
- show=True,
- advanced=False,
- multiline=False,
- value="http://localhost:9200",
- display_name="Elasticsearch URL",
- )
- extra_field2 = TemplateField(
- name="index_name",
- field_type="str",
- required=True,
- placeholder="test-index",
- show=True,
- advanced=False,
- multiline=False,
- value="test-index",
- display_name="Index Name",
- )
- extra_fields.extend((extra_field, extra_field2))
-
- elif self.template.type_name == "FAISS":
- extra_field = TemplateField(
- name="folder_path",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- display_name="Local Path",
- value="",
- )
- extra_field2 = TemplateField(
- name="index_name",
- field_type="str",
- required=False,
- show=True,
- advanced=False,
- value="",
- display_name="Index Name",
- )
- extra_fields.extend((extra_field, extra_field2))
- elif self.template.type_name == "SupabaseVectorStore":
- self.display_name = "Supabase"
- # Add table_name and query_name
- extra_field = TemplateField(
- name="table_name",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- value="",
- )
- extra_field2 = TemplateField(
- name="query_name",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- value="",
- )
- # Add supabase_url and supabase_service_key
- extra_field3 = TemplateField(
- name="supabase_url",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- value="",
- )
- extra_field4 = TemplateField(
- name="supabase_service_key",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- password=True,
- value="",
- )
- extra_fields.extend((extra_field, extra_field2, extra_field3, extra_field4))
-
- elif self.template.type_name == "MongoDBAtlasVectorSearch":
- self.display_name = "MongoDB Atlas"
-
- extra_field = TemplateField(
- name="mongodb_atlas_cluster_uri",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- display_name="MongoDB Atlas Cluster URI",
- value="",
- )
- extra_field2 = TemplateField(
- name="collection_name",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- display_name="Collection Name",
- value="",
- )
- extra_field3 = TemplateField(
- name="db_name",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- display_name="Database Name",
- value="",
- )
- extra_field4 = TemplateField(
- name="index_name",
- field_type="str",
- required=False,
- placeholder="",
- show=True,
- advanced=True,
- multiline=False,
- display_name="Index Name",
- value="",
- )
- extra_fields.extend((extra_field, extra_field2, extra_field3, extra_field4))
-
- if extra_fields:
- for field in extra_fields:
- self.template.add_field(field)
-
- def add_extra_base_classes(self) -> None:
- self.base_classes.extend(("BaseRetriever", "VectorStoreRetriever"))
-
- @staticmethod
- def format_field(field: TemplateField, name: Optional[str] = None) -> None:
- FrontendNode.format_field(field, name)
- # Define common field attributes
-
- # Check and set field attributes
- if field.name == "texts":
- # if field.name is "texts" it has to be replaced
- # when instantiating the vectorstores
- field.name = "documents"
-
- field.field_type = "Document"
- field.display_name = "Documents"
- field.required = False
- field.show = True
- field.advanced = False
- field.is_list = True
- elif field.name and "embedding" in field.name:
- # for backwards compatibility
- field.name = "embedding"
- field.required = True
- field.show = True
- field.advanced = False
- field.display_name = "Embedding"
- field.field_type = "Embeddings"
-
- elif field.name in BASIC_FIELDS:
- field.show = True
- field.advanced = False
- if field.name == "api_key":
- field.display_name = "API Key"
- field.password = True
- elif field.name == "location":
- field.value = ":memory:"
- field.placeholder = ":memory:"
-
- elif field.name in ADVANCED_FIELDS:
- field.show = True
- field.advanced = True
- if "key" in field.name:
- field.password = False
-
- elif field.name == "text_key":
- field.show = False
diff --git a/src/backend/base/langflow/utils/util.py b/src/backend/base/langflow/utils/util.py
index a4cce8ea3..bc7efc161 100644
--- a/src/backend/base/langflow/utils/util.py
+++ b/src/backend/base/langflow/utils/util.py
@@ -6,6 +6,8 @@ from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from docstring_parser import parse
+
+
from langflow.schema.schema import Record
from langflow.services.deps import get_settings_service
from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS
@@ -438,13 +440,13 @@ def update_settings(
settings_service.settings.update_from_yaml(config, dev=dev)
if remove_api_keys:
logger.debug(f"Setting remove_api_keys to {remove_api_keys}")
- settings_service.settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
+ settings_service.settings.update_settings(remove_api_keys=remove_api_keys)
if cache:
logger.debug(f"Setting cache to {cache}")
- settings_service.settings.update_settings(CACHE=cache)
+ settings_service.settings.update_settings(cache=cache)
if components_path:
logger.debug(f"Adding component path {components_path}")
- settings_service.settings.update_settings(COMPONENTS_PATH=components_path)
+ settings_service.settings.update_settings(components_path=components_path)
if not store:
logger.debug("Setting store to False")
- settings_service.settings.update_settings(STORE=False)
+ settings_service.settings.update_settings(store=False)
diff --git a/src/backend/base/poetry.lock b/src/backend/base/poetry.lock
index 653359b34..7a36a93cc 100644
--- a/src/backend/base/poetry.lock
+++ b/src/backend/base/poetry.lock
@@ -142,13 +142,13 @@ files = [
[[package]]
name = "anyio"
-version = "4.3.0"
+version = "4.4.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
- {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
- {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
+ {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
+ {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
]
[package.dependencies]
@@ -545,6 +545,26 @@ files = [
graph = ["objgraph (>=1.7.2)"]
profile = ["gprof2dot (>=2022.7.29)"]
+[[package]]
+name = "dnspython"
+version = "2.6.1"
+description = "DNS toolkit"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
+ {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
+]
+
+[package.extras]
+dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
+dnssec = ["cryptography (>=41)"]
+doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
+doq = ["aioquic (>=0.9.25)"]
+idna = ["idna (>=3.6)"]
+trio = ["trio (>=0.23)"]
+wmi = ["wmi (>=1.5.1)"]
+
[[package]]
name = "docstring-parser"
version = "0.15"
@@ -630,6 +650,21 @@ six = ">=1.9.0"
gmpy = ["gmpy"]
gmpy2 = ["gmpy2"]
+[[package]]
+name = "email-validator"
+version = "2.1.1"
+description = "A robust email address syntax and deliverability validation library."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"},
+ {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"},
+]
+
+[package.dependencies]
+dnspython = ">=2.0.0"
+idna = ">=2.0.0"
+
[[package]]
name = "emoji"
version = "2.12.1"
@@ -663,23 +698,48 @@ test = ["pytest (>=6)"]
[[package]]
name = "fastapi"
-version = "0.110.3"
+version = "0.111.0"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
files = [
- {file = "fastapi-0.110.3-py3-none-any.whl", hash = "sha256:fd7600612f755e4050beb74001310b5a7e1796d149c2ee363124abdfa0289d32"},
- {file = "fastapi-0.110.3.tar.gz", hash = "sha256:555700b0159379e94fdbfc6bb66a0f1c43f4cf7060f25239af3d84b63a656626"},
+ {file = "fastapi-0.111.0-py3-none-any.whl", hash = "sha256:97ecbf994be0bcbdadedf88c3150252bed7b2087075ac99735403b1b76cc8fc0"},
+ {file = "fastapi-0.111.0.tar.gz", hash = "sha256:b9db9dd147c91cb8b769f7183535773d8741dd46f9dc6676cd82eab510228cd7"},
]
[package.dependencies]
+email_validator = ">=2.0.0"
+fastapi-cli = ">=0.0.2"
+httpx = ">=0.23.0"
+jinja2 = ">=2.11.2"
+orjson = ">=3.2.1"
pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
+python-multipart = ">=0.0.7"
starlette = ">=0.37.2,<0.38.0"
typing-extensions = ">=4.8.0"
+ujson = ">=4.0.1,<4.0.2 || >4.0.2,<4.1.0 || >4.1.0,<4.2.0 || >4.2.0,<4.3.0 || >4.3.0,<5.0.0 || >5.0.0,<5.1.0 || >5.1.0"
+uvicorn = {version = ">=0.12.0", extras = ["standard"]}
[package.extras]
all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
+[[package]]
+name = "fastapi-cli"
+version = "0.0.4"
+description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 🚀"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "fastapi_cli-0.0.4-py3-none-any.whl", hash = "sha256:a2552f3a7ae64058cdbb530be6fa6dbfc975dc165e4fa66d224c3d396e25e809"},
+ {file = "fastapi_cli-0.0.4.tar.gz", hash = "sha256:e2e9ffaffc1f7767f488d6da34b6f5a377751c996f397902eb6abb99a67bde32"},
+]
+
+[package.dependencies]
+typer = ">=0.12.3"
+
+[package.extras]
+standard = ["fastapi", "uvicorn[standard] (>=0.15.0)"]
+
[[package]]
name = "frozenlist"
version = "1.4.1"
@@ -890,6 +950,54 @@ http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
trio = ["trio (>=0.22.0,<0.26.0)"]
+[[package]]
+name = "httptools"
+version = "0.6.1"
+description = "A collection of framework independent HTTP protocol utils."
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"},
+ {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"},
+ {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"},
+ {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"},
+ {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"},
+ {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"},
+ {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"},
+ {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"},
+ {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"},
+ {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"},
+ {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"},
+ {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"},
+ {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"},
+ {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"},
+ {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"},
+ {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"},
+ {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"},
+ {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"},
+ {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"},
+ {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"},
+ {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"},
+ {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"},
+ {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"},
+ {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"},
+ {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"},
+ {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"},
+ {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"},
+ {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"},
+ {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"},
+ {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"},
+ {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"},
+ {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"},
+ {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"},
+ {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"},
+ {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"},
+ {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"},
+]
+
+[package.extras]
+test = ["Cython (>=0.29.24,<0.30.0)"]
+
[[package]]
name = "httpx"
version = "0.27.0"
@@ -925,6 +1033,23 @@ files = [
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
]
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
[[package]]
name = "jq"
version = "1.7.0"
@@ -1034,22 +1159,20 @@ files = [
[[package]]
name = "langchain"
-version = "0.1.20"
+version = "0.2.1"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain-0.1.20-py3-none-any.whl", hash = "sha256:09991999fbd6c3421a12db3c7d1f52d55601fc41d9b2a3ef51aab2e0e9c38da9"},
- {file = "langchain-0.1.20.tar.gz", hash = "sha256:f35c95eed8c8375e02dce95a34f2fd4856a4c98269d6dc34547a23dba5beab7e"},
+ {file = "langchain-0.2.1-py3-none-any.whl", hash = "sha256:3e13bf97c5717bce2c281f5117e8778823e8ccf62d949e73d3869448962b1c97"},
+ {file = "langchain-0.2.1.tar.gz", hash = "sha256:5758a315e1ac92eb26dafec5ad0fafa03cafa686aba197d5bb0b1dd28cc03ebe"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
-dataclasses-json = ">=0.5.7,<0.7"
-langchain-community = ">=0.0.38,<0.1"
-langchain-core = ">=0.1.52,<0.2.0"
-langchain-text-splitters = ">=0.0.1,<0.1"
+langchain-core = ">=0.2.0,<0.3.0"
+langchain-text-splitters = ">=0.2.0,<0.3.0"
langsmith = ">=0.1.17,<0.2.0"
numpy = ">=1,<2"
pydantic = ">=1,<3"
@@ -1065,28 +1188,29 @@ cli = ["typer (>=0.9.0,<0.10.0)"]
cohere = ["cohere (>=4,<6)"]
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
embeddings = ["sentence-transformers (>=2,<3)"]
-extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata 
(>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
+extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.1,<0.2)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata 
(>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
javascript = ["esprima (>=4.0.1,<5.0.0)"]
llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
-openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"]
+openai = ["openai (<2)", "tiktoken (>=0.7,<1.0)"]
qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
[[package]]
name = "langchain-community"
-version = "0.0.38"
+version = "0.2.1"
description = "Community contributed LangChain integrations."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_community-0.0.38-py3-none-any.whl", hash = "sha256:ecb48660a70a08c90229be46b0cc5f6bc9f38f2833ee44c57dfab9bf3a2c121a"},
- {file = "langchain_community-0.0.38.tar.gz", hash = "sha256:127fc4b75bc67b62fe827c66c02e715a730fef8fe69bd2023d466bab06b5810d"},
+ {file = "langchain_community-0.2.1-py3-none-any.whl", hash = "sha256:b834e2c5ded6903b839fcaf566eee90a0ffae53405a0f7748202725e701d39cd"},
+ {file = "langchain_community-0.2.1.tar.gz", hash = "sha256:079942e8f15da975769ccaae19042b7bba5481c42020bbbd7d8cad73a9393261"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
dataclasses-json = ">=0.5.7,<0.7"
-langchain-core = ">=0.1.52,<0.2.0"
+langchain = ">=0.2.0,<0.3.0"
+langchain-core = ">=0.2.0,<0.3.0"
langsmith = ">=0.1.0,<0.2.0"
numpy = ">=1,<2"
PyYAML = ">=5.3"
@@ -1096,17 +1220,17 @@ tenacity = ">=8.1.0,<9.0.0"
[package.extras]
cli = ["typer (>=0.9.0,<0.10.0)"]
-extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 
(>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
+extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpathlib (>=0.18,<0.19)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark 
(>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
[[package]]
name = "langchain-core"
-version = "0.1.52"
+version = "0.2.1"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_core-0.1.52-py3-none-any.whl", hash = "sha256:62566749c92e8a1181c255c788548dc16dbc319d896cd6b9c95dc17af9b2a6db"},
- {file = "langchain_core-0.1.52.tar.gz", hash = "sha256:084c3fc452f5a6966c28ab3ec5dbc8b8d26fc3f63378073928f4e29d90b6393f"},
+ {file = "langchain_core-0.2.1-py3-none-any.whl", hash = "sha256:3521e1e573988c47399fca9739270c5d34f8ecec147253ad829eb9ff288f76d5"},
+ {file = "langchain_core-0.2.1.tar.gz", hash = "sha256:49383126168d934559a543ce812c485048d9e6ac9b6798fbf3d4a72b6bba5b0c"},
]
[package.dependencies]
@@ -1122,48 +1246,48 @@ extended-testing = ["jinja2 (>=3,<4)"]
[[package]]
name = "langchain-experimental"
-version = "0.0.58"
+version = "0.0.59"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_experimental-0.0.58-py3-none-any.whl", hash = "sha256:106d3bc7df3dd20687378db7534c2fc21e2589201d43de42f832a1e3913dd55b"},
- {file = "langchain_experimental-0.0.58.tar.gz", hash = "sha256:8ef10ff6b39f44ef468f8f21beb3749957d2262ec64d05db2719934936ca0285"},
+ {file = "langchain_experimental-0.0.59-py3-none-any.whl", hash = "sha256:d6ceb586c15ad35fc619542e86d01f0984a94985324a78a9ed8cd87615ff265d"},
+ {file = "langchain_experimental-0.0.59.tar.gz", hash = "sha256:3a93f5c328f6ee1cd4f9dd8792c535df2d5638cff0d778ee25546804b5282fda"},
]
[package.dependencies]
-langchain = ">=0.1.17,<0.2.0"
-langchain-core = ">=0.1.52,<0.2.0"
+langchain-community = ">=0.2,<0.3"
+langchain-core = ">=0.2,<0.3"
[package.extras]
extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "pandas (>=2.0.1,<3.0.0)", "presidio-analyzer (>=2.2.352,<3.0.0)", "presidio-anonymizer (>=2.2.352,<3.0.0)", "sentence-transformers (>=2,<3)", "tabulate (>=0.9.0,<0.10.0)", "vowpal-wabbit-next (==0.6.0)"]
[[package]]
name = "langchain-text-splitters"
-version = "0.0.2"
+version = "0.2.0"
description = "LangChain text splitting utilities"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_text_splitters-0.0.2-py3-none-any.whl", hash = "sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d"},
- {file = "langchain_text_splitters-0.0.2.tar.gz", hash = "sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1"},
+ {file = "langchain_text_splitters-0.2.0-py3-none-any.whl", hash = "sha256:7b4c6a45f8471630a882b321e138329b6897102a5bc62f4c12be1c0b05bb9199"},
+ {file = "langchain_text_splitters-0.2.0.tar.gz", hash = "sha256:b32ab4f7397f7d42c1fa3283fefc2547ba356bd63a68ee9092865e5ad83c82f9"},
]
[package.dependencies]
-langchain-core = ">=0.1.28,<0.3"
+langchain-core = ">=0.2.0,<0.3.0"
[package.extras]
extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"]
[[package]]
name = "langchainhub"
-version = "0.1.15"
+version = "0.1.16"
description = "The LangChain Hub API client"
optional = false
-python-versions = ">=3.8.1,<4.0"
+python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchainhub-0.1.15-py3-none-any.whl", hash = "sha256:89a0951abd1db255e91c6d545d092a598fc255aa865d1ffc3ce8f93bbeae60e7"},
- {file = "langchainhub-0.1.15.tar.gz", hash = "sha256:fa3ff81a31946860f84c119f1e2f6b7c7707e2bd7ed2394a7313b286d59f3bda"},
+ {file = "langchainhub-0.1.16-py3-none-any.whl", hash = "sha256:a4379a1879cc6b441b8d02cc65e28a54f160fba61c9d1d4b0eddc3a276dff99a"},
+ {file = "langchainhub-0.1.16.tar.gz", hash = "sha256:9f11e68fddb575e70ef4b28800eedbd9eeb180ba508def04f7153ea5b246b6fc"},
]
[package.dependencies]
@@ -1172,13 +1296,13 @@ types-requests = ">=2.31.0.2,<3.0.0.0"
[[package]]
name = "langsmith"
-version = "0.1.62"
+version = "0.1.63"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langsmith-0.1.62-py3-none-any.whl", hash = "sha256:3a9f112643f64d736b8c875390c750fe6485804ea53aeae4edebce0afa4383a5"},
- {file = "langsmith-0.1.62.tar.gz", hash = "sha256:7ef894c14e6d4175fce88ec3bcd5a9c8cf9a456ea77e26e361f519ad082f34a8"},
+ {file = "langsmith-0.1.63-py3-none-any.whl", hash = "sha256:7810afdf5e3f3b472fc581a29371fb96cd843dde2149e048d1b9610325159d1e"},
+ {file = "langsmith-0.1.63.tar.gz", hash = "sha256:a609405b52f6f54df442a142cbf19ab38662d54e532f96028b4c546434d4afdf"},
]
[package.dependencies]
@@ -2142,6 +2266,16 @@ docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"]
full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"]
image = ["Pillow (>=8.0.0)"]
+[[package]]
+name = "pyperclip"
+version = "1.8.2"
+description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pyperclip-1.8.2.tar.gz", hash = "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57"},
+]
+
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
@@ -2522,13 +2656,13 @@ sqlcipher = ["sqlcipher3_binary"]
[[package]]
name = "sqlmodel"
-version = "0.0.16"
+version = "0.0.18"
description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness."
optional = false
-python-versions = ">=3.7,<4.0"
+python-versions = ">=3.7"
files = [
- {file = "sqlmodel-0.0.16-py3-none-any.whl", hash = "sha256:b972f5d319580d6c37ecc417881f6ec4d1ad3ed3583d0ac0ed43234a28bf605a"},
- {file = "sqlmodel-0.0.16.tar.gz", hash = "sha256:966656f18a8e9a2d159eb215b07fb0cf5222acfae3362707ca611848a8a06bd1"},
+ {file = "sqlmodel-0.0.18-py3-none-any.whl", hash = "sha256:d70fdf8fe595e30a918660cf4537b9c5fc2fffdbfcba851a0135de73c3ebcbb7"},
+ {file = "sqlmodel-0.0.18.tar.gz", hash = "sha256:2e520efe03810ef2c268a1004cfc5ef8f8a936312232f38d6c8e62c11af2cac3"},
]
[package.dependencies]
@@ -2600,13 +2734,13 @@ urllib3 = ">=2"
[[package]]
name = "typing-extensions"
-version = "4.11.0"
+version = "4.12.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
- {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
+ {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"},
+ {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"},
]
[[package]]
@@ -2635,6 +2769,93 @@ files = [
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
]
+[[package]]
+name = "ujson"
+version = "5.10.0"
+description = "Ultra fast JSON encoder and decoder for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"},
+ {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"},
+ {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"},
+ {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"},
+ {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"},
+ {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"},
+ {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"},
+ {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"},
+ {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"},
+ {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"},
+ {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"},
+ {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"},
+ {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"},
+ {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"},
+ {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"},
+ {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"},
+ {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"},
+ {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"},
+ {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"},
+ {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"},
+ {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"},
+ {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"},
+ {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"},
+ {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"},
+ {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"},
+ {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"},
+ {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"},
+ {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"},
+ {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"},
+ {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"},
+ {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"},
+ {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"},
+ {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"},
+ {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"},
+ {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"},
+ {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"},
+ {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"},
+ {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"},
+ {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"},
+ {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"},
+ {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"},
+ {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"},
+ {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"},
+ {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"},
+ {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"},
+ {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"},
+ {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"},
+ {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"},
+ {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"},
+ {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"},
+ {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"},
+ {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"},
+ {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"},
+ {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"},
+ {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"},
+ {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"},
+ {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"},
+ {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"},
+ {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"},
+ {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"},
+ {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"},
+ {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"},
+ {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"},
+ {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"},
+ {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"},
+ {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"},
+ {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"},
+ {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"},
+ {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"},
+ {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"},
+ {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"},
+ {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"},
+ {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"},
+ {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"},
+ {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"},
+ {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"},
+ {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"},
+ {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"},
+]
+
[[package]]
name = "urllib3"
version = "2.2.1"
@@ -2665,12 +2886,150 @@ files = [
[package.dependencies]
click = ">=7.0"
+colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""}
h11 = ">=0.8"
+httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
+python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
+pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
+uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
+watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
+websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
[package.extras]
standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
+[[package]]
+name = "uvloop"
+version = "0.19.0"
+description = "Fast implementation of asyncio event loop on top of libuv"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"},
+ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"},
+ {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"},
+ {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"},
+ {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"},
+ {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"},
+ {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"},
+ {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"},
+ {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"},
+ {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"},
+ {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"},
+ {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"},
+ {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"},
+ {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"},
+ {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"},
+ {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"},
+ {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"},
+ {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"},
+ {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"},
+ {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"},
+ {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"},
+ {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"},
+ {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"},
+ {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"},
+ {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"},
+ {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"},
+ {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"},
+ {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"},
+ {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"},
+ {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"},
+ {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"},
+]
+
+[package.extras]
+docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
+test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]
+
+[[package]]
+name = "watchfiles"
+version = "0.22.0"
+description = "Simple, modern and high performance file watching and code reload in python."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"},
+ {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"},
+ {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"},
+ {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"},
+ {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"},
+ {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"},
+ {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"},
+ {file = "watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"},
+ {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"},
+ {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"},
+ {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"},
+ {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"},
+ {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"},
+ {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"},
+ {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = "sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"},
+ {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"},
+ {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"},
+ {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"},
+ {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"},
+ {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"},
+ {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"},
+ {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"},
+ {file = "watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"},
+ {file = "watchfiles-0.22.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d47e9ef1a94cc7a536039e46738e17cce058ac1593b2eccdede8bf72e45f372a"},
+ {file = "watchfiles-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28f393c1194b6eaadcdd8f941307fc9bbd7eb567995232c830f6aef38e8a6e88"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd64f3a4db121bc161644c9e10a9acdb836853155a108c2446db2f5ae1778c3d"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2abeb79209630da981f8ebca30a2c84b4c3516a214451bfc5f106723c5f45843"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc382083afba7918e32d5ef12321421ef43d685b9a67cc452a6e6e18920890e"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d048ad5d25b363ba1d19f92dcf29023988524bee6f9d952130b316c5802069cb"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:103622865599f8082f03af4214eaff90e2426edff5e8522c8f9e93dc17caee13"},
+ {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e1f3cf81f1f823e7874ae563457828e940d75573c8fbf0ee66818c8b6a9099"},
+ {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8597b6f9dc410bdafc8bb362dac1cbc9b4684a8310e16b1ff5eee8725d13dcd6"},
+ {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b04a2cbc30e110303baa6d3ddce8ca3664bc3403be0f0ad513d1843a41c97d1"},
+ {file = "watchfiles-0.22.0-cp38-none-win32.whl", hash = "sha256:b610fb5e27825b570554d01cec427b6620ce9bd21ff8ab775fc3a32f28bba63e"},
+ {file = "watchfiles-0.22.0-cp38-none-win_amd64.whl", hash = "sha256:fe82d13461418ca5e5a808a9e40f79c1879351fcaeddbede094028e74d836e86"},
+ {file = "watchfiles-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3973145235a38f73c61474d56ad6199124e7488822f3a4fc97c72009751ae3b0"},
+ {file = "watchfiles-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:280a4afbc607cdfc9571b9904b03a478fc9f08bbeec382d648181c695648202f"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0d883351a34c01bd53cfa75cd0292e3f7e268bacf2f9e33af4ecede7e21d1d"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9165bcab15f2b6d90eedc5c20a7f8a03156b3773e5fb06a790b54ccecdb73385"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc1b9b56f051209be458b87edb6856a449ad3f803315d87b2da4c93b43a6fe72"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc1fc25a1dedf2dd952909c8e5cb210791e5f2d9bc5e0e8ebc28dd42fed7562"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc92d2d2706d2b862ce0568b24987eba51e17e14b79a1abcd2edc39e48e743c8"},
+ {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97b94e14b88409c58cdf4a8eaf0e67dfd3ece7e9ce7140ea6ff48b0407a593ec"},
+ {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96eec15e5ea7c0b6eb5bfffe990fc7c6bd833acf7e26704eb18387fb2f5fd087"},
+ {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:28324d6b28bcb8d7c1041648d7b63be07a16db5510bea923fc80b91a2a6cbed6"},
+ {file = "watchfiles-0.22.0-cp39-none-win32.whl", hash = "sha256:8c3e3675e6e39dc59b8fe5c914a19d30029e36e9f99468dddffd432d8a7b1c93"},
+ {file = "watchfiles-0.22.0-cp39-none-win_amd64.whl", hash = "sha256:25c817ff2a86bc3de3ed2df1703e3d24ce03479b27bb4527c57e722f8554d971"},
+ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"},
+ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"},
+ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"},
+ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"},
+ {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"},
+ {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"},
+ {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"},
+ {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"},
+ {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"},
+ {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"},
+ {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"},
+ {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"},
+ {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"},
+]
+
+[package.dependencies]
+anyio = ">=3.0.0"
+
[[package]]
name = "websockets"
version = "12.0"
@@ -2891,4 +3250,4 @@ local = []
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "200c17e119f7ba7fdb64de320bdf464c65daf06f62bcc41258b32247a99e3dc1"
+content-hash = "31d8e5ce045ef7d94e63058559b5f8181e6b51fc923c4904f45481443d59235d"
diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml
index b59196fa2..ce7557464 100644
--- a/src/backend/base/pyproject.toml
+++ b/src/backend/base/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow-base"
-version = "0.0.47"
+version = "0.0.49"
description = "A Python package with a built-in web application"
authors = ["Langflow "]
maintainers = [
@@ -26,18 +26,18 @@ langflow-base = "langflow.__main__:main"
[tool.poetry.dependencies]
python = ">=3.10,<3.13"
-fastapi = "^0.110.1"
+fastapi = "^0.111.0"
httpx = "*"
uvicorn = "^0.29.0"
gunicorn = "^22.0.0"
-langchain = "~0.1.16"
+langchain = "~0.2.0"
langchainhub = "~0.1.15"
-sqlmodel = "^0.0.16"
+sqlmodel = "^0.0.18"
loguru = "^0.7.1"
rich = "^13.7.0"
langchain-experimental = "*"
-pydantic = "^2.5.0"
-pydantic-settings = "^2.1.0"
+pydantic = "^2.7.0"
+pydantic-settings = "^2.2.0"
websockets = "*"
typer = "^0.12.0"
cachetools = "^5.3.1"
@@ -56,11 +56,12 @@ duckdb = "^0.10.2"
python-socketio = "^5.11.0"
python-docx = "^1.1.0"
jq = { version = "^1.7.0", markers = "sys_platform != 'win32'" }
-pypdf = "^4.1.0"
+pypdf = "^4.2.0"
nest-asyncio = "^1.6.0"
-emoji = "^2.11.0"
+emoji = "^2.12.0"
cryptography = "^42.0.5"
asyncer = "^0.0.5"
+pyperclip = "^1.8.2"
[tool.poetry.extras]
diff --git a/example.har b/src/frontend/harFiles/example.har
similarity index 100%
rename from example.har
rename to src/frontend/harFiles/example.har
diff --git a/src/frontend/src/App.tsx b/src/frontend/src/App.tsx
index db35073e8..922ec9994 100644
--- a/src/frontend/src/App.tsx
+++ b/src/frontend/src/App.tsx
@@ -15,6 +15,7 @@ import {
} from "./constants/constants";
import { AuthContext } from "./contexts/authContext";
import { autoLogin, getGlobalVariables, getHealth } from "./controllers/API";
+import useTrackLastVisitedPath from "./hooks/use-track-last-visited-path";
import Router from "./routes";
import useAlertStore from "./stores/alertStore";
import { useDarkStore } from "./stores/darkStore";
@@ -24,6 +25,8 @@ import { useGlobalVariablesStore } from "./stores/globalVariablesStore/globalVar
import { useStoreStore } from "./stores/storeStore";
import { useTypesStore } from "./stores/typesStore";
export default function App() {
+ useTrackLastVisitedPath();
+
const removeFromTempNotificationList = useAlertStore(
(state) => state.removeFromTempNotificationList,
);
@@ -104,7 +107,6 @@ export default function App() {
const fetchAllData = async () => {
setTimeout(async () => {
await Promise.all([refreshStars(), refreshVersion(), fetchData()]);
- getFoldersApi();
}, 1000);
};
@@ -112,6 +114,7 @@ export default function App() {
return new Promise(async (resolve, reject) => {
if (isAuthenticated) {
try {
+ await getFoldersApi();
await getTypes();
await refreshFlows();
const res = await getGlobalVariables();
@@ -208,7 +211,7 @@ export default function App() {
{tempNotificationList.map((alert) => (
- {alert.type === "error" && (
+ {alert.type === "error" ? (
- )}
-
- ))}
-
-
- {tempNotificationList.map((alert) => (
-
- {alert.type === "notice" ? (
-
) : (
- alert.type === "success" && (
-
@@ -244,6 +233,20 @@ export default function App() {
))}
+
+ {tempNotificationList.map((alert) => (
+
+ {alert.type === "success" && (
+
+ )}
+
+ ))}
+
);
diff --git a/src/frontend/src/components/addNewVariableButtonComponent/utils/sort-by-name.tsx b/src/frontend/src/components/addNewVariableButtonComponent/utils/sort-by-name.tsx
index 96a1b6b68..f3dc06453 100644
--- a/src/frontend/src/components/addNewVariableButtonComponent/utils/sort-by-name.tsx
+++ b/src/frontend/src/components/addNewVariableButtonComponent/utils/sort-by-name.tsx
@@ -1,3 +1,3 @@
export default function sortByName(stringList: string[]): string[] {
return stringList.sort((a, b) => a.localeCompare(b));
-}
\ No newline at end of file
+}
diff --git a/src/frontend/src/components/cardComponent/utils/convert-test-name.tsx b/src/frontend/src/components/cardComponent/utils/convert-test-name.tsx
index ac8800540..068b7b585 100644
--- a/src/frontend/src/components/cardComponent/utils/convert-test-name.tsx
+++ b/src/frontend/src/components/cardComponent/utils/convert-test-name.tsx
@@ -1,3 +1,3 @@
export function convertTestName(name: string): string {
- return name.replace(/ /g, "-").toLowerCase();
+ return name.replace(/ /g, "-").toLowerCase();
}
diff --git a/src/frontend/src/components/headerComponent/components/menuBar/index.tsx b/src/frontend/src/components/headerComponent/components/menuBar/index.tsx
index b5a115e1e..41f7b6d5b 100644
--- a/src/frontend/src/components/headerComponent/components/menuBar/index.tsx
+++ b/src/frontend/src/components/headerComponent/components/menuBar/index.tsx
@@ -8,7 +8,6 @@ import {
} from "../../../ui/dropdown-menu";
import { useNavigate } from "react-router-dom";
-import { Node } from "reactflow";
import { UPLOAD_ERROR_ALERT } from "../../../../constants/alerts_constants";
import { SAVED_HOVER } from "../../../../constants/constants";
import ExportModal from "../../../../modals/exportModal";
@@ -22,11 +21,7 @@ import IconComponent from "../../../genericIconComponent";
import ShadTooltip from "../../../shadTooltipComponent";
import { Button } from "../../../ui/button";
-export const MenuBar = ({
- removeFunction,
-}: {
- removeFunction: (nodes: Node[]) => void;
-}): JSX.Element => {
+export const MenuBar = ({}: {}): JSX.Element => {
const addFlow = useFlowsManagerStore((state) => state.addFlow);
const currentFlow = useFlowsManagerStore((state) => state.currentFlow);
const setErrorData = useAlertStore((state) => state.setErrorData);
@@ -36,7 +31,6 @@ export const MenuBar = ({
const saveLoading = useFlowsManagerStore((state) => state.saveLoading);
const [openSettings, setOpenSettings] = useState(false);
const [openLogs, setOpenLogs] = useState(false);
- const nodes = useFlowStore((state) => state.nodes);
const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow);
const navigate = useNavigate();
const isBuilding = useFlowStore((state) => state.isBuilding);
@@ -72,14 +66,6 @@ export const MenuBar = ({
return currentFlow ? (
-
{
- removeFunction(nodes);
- navigate("/");
- }}
- >
-
-
diff --git a/src/frontend/src/components/headerComponent/index.tsx b/src/frontend/src/components/headerComponent/index.tsx
index 2467fdcfe..4f6c02bd8 100644
--- a/src/frontend/src/components/headerComponent/index.tsx
+++ b/src/frontend/src/components/headerComponent/index.tsx
@@ -3,14 +3,17 @@ import { FaDiscord, FaGithub } from "react-icons/fa";
import { RiTwitterXFill } from "react-icons/ri";
import { Link, useLocation, useNavigate, useParams } from "react-router-dom";
import AlertDropdown from "../../alerts/alertDropDown";
-import { USER_PROJECTS_HEADER } from "../../constants/constants";
+import {
+ LOCATIONS_TO_RETURN,
+ USER_PROJECTS_HEADER,
+} from "../../constants/constants";
import { AuthContext } from "../../contexts/authContext";
-import { Node } from "reactflow";
import useAlertStore from "../../stores/alertStore";
import { useDarkStore } from "../../stores/darkStore";
import useFlowStore from "../../stores/flowStore";
import useFlowsManagerStore from "../../stores/flowsManagerStore";
+import { useLocationStore } from "../../stores/locationStore";
import { useStoreStore } from "../../stores/storeStore";
import { gradients } from "../../utils/styleUtils";
import IconComponent from "../genericIconComponent";
@@ -29,6 +32,7 @@ import MenuBar from "./components/menuBar";
export default function Header(): JSX.Element {
const notificationCenter = useAlertStore((state) => state.notificationCenter);
const location = useLocation();
+
const { logout, autoLogin, isAdmin, userData } = useContext(AuthContext);
const navigate = useNavigate();
const removeFlow = useFlowsManagerStore((store) => store.removeFlow);
@@ -40,20 +44,56 @@ export default function Header(): JSX.Element {
const setDark = useDarkStore((state) => state.setDark);
const stars = useDarkStore((state) => state.stars);
- async function checkForChanges(nodes: Node[]): Promise {
+ const routeHistory = useLocationStore((state) => state.routeHistory);
+
+ async function checkForChanges(): Promise {
if (nodes.length === 0) {
await removeFlow(id!);
}
}
+ const redirectToLastLocation = () => {
+ const lastFlowVisitedIndex = routeHistory
+ .reverse()
+ .findIndex(
+ (path) => path.includes("/flow/") && path !== location.pathname,
+ );
+
+ const lastFlowVisited = routeHistory[lastFlowVisitedIndex];
+ lastFlowVisited && !location.pathname.includes("/flow")
+ ? navigate(lastFlowVisited)
+ : navigate("/all");
+ };
+
+ const visitedFlowPathBefore = () => {
+ const lastThreeVisitedPaths = routeHistory.slice(-3);
+ return lastThreeVisitedPaths.some((path) => path.includes("/flow/"));
+ };
+
+ const showArrowReturnIcon =
+ LOCATIONS_TO_RETURN.some((path) => location.pathname.includes(path)) &&
+ visitedFlowPathBefore();
+
return (
- checkForChanges(nodes)}>
+
⛓️
-
+ {showArrowReturnIcon && (
+ {
+ checkForChanges();
+ redirectToLastLocation();
+ }}
+ >
+
+
+ )}
+
+
+
{
- checkForChanges(nodes);
- }}
+ onClick={checkForChanges}
>
{USER_PROJECTS_HEADER}
@@ -80,9 +118,7 @@ export default function Header(): JSX.Element {
className="gap-2"
variant={location.pathname === "/store" ? "primary" : "secondary"}
size="sm"
- onClick={() => {
- checkForChanges(nodes);
- }}
+ onClick={checkForChanges}
data-testid="button-store"
>
diff --git a/src/frontend/src/components/inputComponent/index.tsx b/src/frontend/src/components/inputComponent/index.tsx
index 4be2a48e4..8fb749279 100644
--- a/src/frontend/src/components/inputComponent/index.tsx
+++ b/src/frontend/src/components/inputComponent/index.tsx
@@ -33,6 +33,8 @@ export default function InputComponent({
optionButton,
objectOptions,
isObjectOption = false,
+ name,
+ onChangeFolderName,
}: InputComponentType): JSX.Element {
const [pwdVisible, setPwdVisible] = useState(false);
const refInput = useRef(null);
@@ -53,6 +55,7 @@ export default function InputComponent({
{isForm ? (
{
+ if (onChangeFolderName) {
+ return onChangeFolderName(e);
+ }
onChange && onChange(e.target.value);
}}
onCopy={(e) => {
diff --git a/src/frontend/src/components/sidebarComponent/components/sideBarButtons/index.tsx b/src/frontend/src/components/sidebarComponent/components/sideBarButtons/index.tsx
index 9e4aa9400..24282a2b3 100644
--- a/src/frontend/src/components/sidebarComponent/components/sideBarButtons/index.tsx
+++ b/src/frontend/src/components/sidebarComponent/components/sideBarButtons/index.tsx
@@ -11,10 +11,7 @@ type SideBarButtonsComponentProps = {
pathname: string;
handleOpenNewFolderModal?: () => void;
};
-const SideBarButtonsComponent = ({
- items,
- handleOpenNewFolderModal,
-}: SideBarButtonsComponentProps) => {
+const SideBarButtonsComponent = ({ items }: SideBarButtonsComponentProps) => {
return (
<>
{items.map((item) => (
@@ -26,7 +23,6 @@ const SideBarButtonsComponent = ({
buttonVariants({ variant: "ghost" }),
"!w-[200px] cursor-pointer justify-start gap-2 border border-transparent hover:border-border hover:bg-transparent",
)}
- onClick={handleOpenNewFolderModal}
>
{item.title}
diff --git a/src/frontend/src/components/sidebarComponent/components/sideBarFolderButtons/index.tsx b/src/frontend/src/components/sidebarComponent/components/sideBarFolderButtons/index.tsx
index ee83a34e6..8a1dc4a29 100644
--- a/src/frontend/src/components/sidebarComponent/components/sideBarFolderButtons/index.tsx
+++ b/src/frontend/src/components/sidebarComponent/components/sideBarFolderButtons/index.tsx
@@ -1,16 +1,18 @@
+import { useEffect, useRef, useState } from "react";
import { useLocation } from "react-router-dom";
import { FolderType } from "../../../../pages/MainPage/entities";
+import { addFolder, updateFolder } from "../../../../pages/MainPage/services";
+import { handleDownloadFolderFn } from "../../../../pages/MainPage/utils/handle-download-folder";
+import useFlowsManagerStore from "../../../../stores/flowsManagerStore";
import { useFolderStore } from "../../../../stores/foldersStore";
+import { handleKeyDown } from "../../../../utils/reactflowUtils";
import { cn } from "../../../../utils/utils";
-import DropdownButton from "../../../dropdownButtonComponent";
import IconComponent, {
ForwardedIconComponent,
} from "../../../genericIconComponent";
import { Button, buttonVariants } from "../../../ui/button";
+import { Input } from "../../../ui/input";
import useFileDrop from "../../hooks/use-on-file-drop";
-import useFlowsManagerStore from "../../../../stores/flowsManagerStore";
-import { handleDownloadFolderFn } from "../../../../pages/MainPage/utils/handle-download-folder";
-import useAlertStore from "../../../../stores/alertStore";
type SideBarFoldersButtonsComponentProps = {
folders: FolderType[];
@@ -18,22 +20,27 @@ type SideBarFoldersButtonsComponentProps = {
handleChangeFolder?: (id: string) => void;
handleEditFolder?: (item: FolderType) => void;
handleDeleteFolder?: (item: FolderType) => void;
- handleAddFolder?: () => void;
};
const SideBarFoldersButtonsComponent = ({
- folders,
pathname,
- handleAddFolder,
handleChangeFolder,
handleEditFolder,
handleDeleteFolder,
}: SideBarFoldersButtonsComponentProps) => {
+ const refInput = useRef
(null);
+ const setFolders = useFolderStore((state) => state.setFolders);
+ const folders = useFolderStore((state) => state.folders);
+ const [foldersNames, setFoldersNames] = useState({});
+ const takeSnapshot = useFlowsManagerStore((state) => state.takeSnapshot);
+ const [editFolders, setEditFolderName] = useState(
+ folders.map((obj) => ({ name: obj.name, edit: false })),
+ );
const uploadFolder = useFolderStore((state) => state.uploadFolder);
const currentFolder = pathname.split("/");
const urlWithoutPath = pathname.split("/").length < 4;
const myCollectionId = useFolderStore((state) => state.myCollectionId);
- const allFlows = useFlowsManagerStore((state) => state.allFlows);
- const setErrorData = useAlertStore((state) => state.setErrorData);
+ const getFoldersApi = useFolderStore((state) => state.getFoldersApi);
+ const folderIdDragging = useFolderStore((state) => state.folderIdDragging);
const checkPathName = (itemId: string) => {
if (urlWithoutPath && itemId === myCollectionId) {
@@ -62,20 +69,44 @@ const SideBarFoldersButtonsComponent = ({
handleDownloadFolderFn(id);
};
+ function addNewFolder() {
+ addFolder({ name: "New Folder", parent_id: null, description: "" }).then(
+ (res) => {
+ getFoldersApi(true);
+ },
+ );
+ }
+
+ function handleEditFolderName(e, name): void {
+ const {
+ target: { value },
+ } = e;
+ setFoldersNames((old) => ({
+ ...old,
+ [name]: value,
+ }));
+ }
+
+ useEffect(() => {
+ folders.map((obj) => ({ name: obj.name, edit: false }));
+ }, [folders]);
+
+ console.log(folderId, folderIdDragging);
+
return (
<>
-
+
+
+ New Folder
+
<>
- {folders.map((item, index) => (
- onDrop(e, item.id!)}
- key={item.id}
- data-testid={`sidebar-nav-${item.name}`}
- className={cn(
- buttonVariants({ variant: "ghost" }),
- checkPathName(item.id!)
- ? "border border-border bg-muted hover:bg-muted"
- : "border hover:bg-transparent lg:border-transparent lg:hover:border-border",
- "group flex w-full shrink-0 cursor-pointer gap-2 opacity-100 lg:min-w-full",
- )}
- onClick={() => handleChangeFolder!(item.id!)}
- >
-
-
-
- {item.name}
-
-
- {index > 0 && (
-
{
- handleDeleteFolder!(item);
- e.stopPropagation();
- e.preventDefault();
- }}
- variant={"ghost"}
- >
-
-
+ {folders.map((item, index) => {
+ const editFolderName = editFolders?.filter(
+ (folder) => folder.name === item.name,
+ )[0];
+ return (
+
dragOver(e, item.id!)}
+ onDragEnter={(e) => dragEnter(e, item.id!)}
+ onDragLeave={dragLeave}
+ onDrop={(e) => onDrop(e, item.id!)}
+ key={item.id}
+ data-testid={`sidebar-nav-${item.name}`}
+ className={cn(
+ buttonVariants({ variant: "ghost" }),
+ checkPathName(item.id!)
+ ? "border border-border bg-muted hover:bg-muted"
+ : "border hover:bg-transparent lg:border-transparent lg:hover:border-border",
+ "group flex w-full shrink-0 cursor-pointer gap-2 opacity-100 lg:min-w-full",
+ folderIdDragging === item.id! ? "bg-border" : "",
)}
- {index > 0 && (
+ onClick={() => handleChangeFolder!(item.id!)}
+ >
+
{
+ if (item.name === "My Projects") {
+ return;
+ }
+
+ if (!foldersNames[item.name]) {
+ setFoldersNames({ [item.name]: item.name });
+ }
+
+ if (
+ editFolders.find((obj) => obj.name === item.name)?.name
+ ) {
+ const newEditFolders = editFolders.map((obj) => {
+ if (obj.name === item.name) {
+ return { name: item.name, edit: true };
+ }
+ return { name: obj.name, edit: false };
+ });
+ setEditFolderName(newEditFolders);
+ takeSnapshot();
+ event.stopPropagation();
+ event.preventDefault();
+ return;
+ }
+
+ setEditFolderName((old) => [
+ ...old,
+ { name: item.name, edit: true },
+ ]);
+ setFoldersNames((oldFolder) => ({
+ ...oldFolder,
+ [item.name]: item.name,
+ }));
+ takeSnapshot();
+ event.stopPropagation();
+ event.preventDefault();
+ }}
+ className="flex w-full items-center gap-2"
+ >
+
+ {editFolderName?.edit ? (
+
+ {
+ handleEditFolderName(e, item.name);
+ }}
+ ref={refInput}
+ onKeyDown={(e) => {
+ if (e.key === "Escape") {
+ const newEditFolders = editFolders.map((obj) => {
+ if (obj.name === item.name) {
+ return { name: item.name, edit: false };
+ }
+ return { name: obj.name, edit: false };
+ });
+ setEditFolderName(newEditFolders);
+ setFoldersNames({});
+ setEditFolderName(
+ folders.map((obj) => ({
+ name: obj.name,
+ edit: false,
+ })),
+ );
+ }
+ if (e.key === "Enter") {
+ refInput.current?.blur();
+ }
+ handleKeyDown(e, e.key, "");
+ }}
+ autoFocus={true}
+ onBlur={async () => {
+ const newEditFolders = editFolders.map((obj) => {
+ if (obj.name === item.name) {
+ return { name: item.name, edit: false };
+ }
+ return { name: obj.name, edit: false };
+ });
+ setEditFolderName(newEditFolders);
+ if (foldersNames[item.name].trim() !== "") {
+ setFoldersNames((old) => ({
+ ...old,
+ [item.name]: foldersNames[item.name],
+ }));
+ const body = {
+ ...item,
+ name: foldersNames[item.name],
+ flows: item.flows?.length > 0 ? item.flows : [],
+ components:
+ item.components?.length > 0
+ ? item.components
+ : [],
+ };
+ const updatedFolder = await updateFolder(
+ body,
+ item.id!,
+ );
+ const updateFolders = folders.filter(
+ (f) => f.name !== item.name,
+ );
+ setFolders([...updateFolders, updatedFolder]);
+ setFoldersNames({});
+ setEditFolderName(
+ folders.map((obj) => ({
+ name: obj.name,
+ edit: false,
+ })),
+ );
+ } else {
+ setFoldersNames((old) => ({
+ ...old,
+ [item.name]: item.name,
+ }));
+ }
+ }}
+ value={foldersNames[item.name]}
+ id={`input-folder-${item.name}`}
+ />
+
+ ) : (
+
+ {item.name}
+
+ )}
+
+ {index > 0 && (
+
{
+ handleDeleteFolder!(item);
+ e.stopPropagation();
+ e.preventDefault();
+ }}
+ variant={"ghost"}
+ >
+
+
+ )}
+ {/* {index > 0 && (
+
{
+ e.stopPropagation();
+ e.preventDefault();
+ }}
+ variant={"ghost"}
+ >
+
+
+ )} */}
{
- handleEditFolder!(item);
+ handleDownloadFolder(item.id!);
e.stopPropagation();
e.preventDefault();
}}
variant={"ghost"}
>
- )}
-
{
- handleDownloadFolder(item.id!);
- e.stopPropagation();
- e.preventDefault();
- }}
- variant={"ghost"}
- >
-
-
+
-
- ))}
+ );
+ })}
>
>
diff --git a/src/frontend/src/components/sidebarComponent/hooks/use-on-file-drop.tsx b/src/frontend/src/components/sidebarComponent/hooks/use-on-file-drop.tsx
index ce0ad3614..141dcb110 100644
--- a/src/frontend/src/components/sidebarComponent/hooks/use-on-file-drop.tsx
+++ b/src/frontend/src/components/sidebarComponent/hooks/use-on-file-drop.tsx
@@ -7,13 +7,16 @@ import { uploadFlowsFromFolders } from "../../../pages/MainPage/services";
import useAlertStore from "../../../stores/alertStore";
import useFlowsManagerStore from "../../../stores/flowsManagerStore";
import { useFolderStore } from "../../../stores/foldersStore";
-import { FlowType } from "../../../types/flow";
+import { addVersionToDuplicates } from "../../../utils/reactflowUtils";
const useFileDrop = (folderId, folderChangeCallback) => {
const setFolderDragging = useFolderStore((state) => state.setFolderDragging);
+ const setFolderIdDragging = useFolderStore(
+ (state) => state.setFolderIdDragging,
+ );
+
const setErrorData = useAlertStore((state) => state.setErrorData);
const getFoldersApi = useFolderStore((state) => state.getFoldersApi);
- const refreshFlows = useFlowsManagerStore((state) => state.refreshFlows);
const flows = useFlowsManagerStore((state) => state.flows);
const triggerFolderChange = (folderId) => {
@@ -42,12 +45,14 @@ const useFileDrop = (folderId, folderChangeCallback) => {
| React.DragEvent
| React.DragEvent
| React.DragEvent,
+ folderId: string,
) => {
e.preventDefault();
if (e.dataTransfer.types.some((types) => types === "Files")) {
setFolderDragging(true);
}
+ setFolderIdDragging(folderId);
};
const dragEnter = (
@@ -55,10 +60,12 @@ const useFileDrop = (folderId, folderChangeCallback) => {
| React.DragEvent
| React.DragEvent
| React.DragEvent,
+ folderId: string,
) => {
if (e.dataTransfer.types.some((types) => types === "Files")) {
setFolderDragging(true);
}
+ setFolderIdDragging(folderId);
e.preventDefault();
};
@@ -71,6 +78,7 @@ const useFileDrop = (folderId, folderChangeCallback) => {
e.preventDefault();
if (e.target === e.currentTarget) {
setFolderDragging(false);
+ setFolderIdDragging("");
}
};
@@ -92,7 +100,6 @@ const useFileDrop = (folderId, folderChangeCallback) => {
e.preventDefault();
handleFileDrop(e);
- setFolderDragging(false);
};
const uploadFromDragCard = (flowId, folderId) => {
@@ -101,11 +108,15 @@ const useFileDrop = (folderId, folderChangeCallback) => {
if (!selectedFlow) {
throw new Error("Flow not found");
}
+ const updatedFlow = { ...selectedFlow, folder_id: folderId };
+
+ const newName = addVersionToDuplicates(updatedFlow, flows);
+
+ updatedFlow.name = newName;
+
+ setFolderDragging(false);
+ setFolderIdDragging("");
- const updatedFlow: FlowType = {
- ...selectedFlow,
- folder_id: folderId,
- };
updateFlowInDatabase(updatedFlow).then(() => {
getFoldersApi(true);
triggerFolderChange(folderId);
@@ -115,11 +126,11 @@ const useFileDrop = (folderId, folderChangeCallback) => {
const uploadFormData = (data) => {
const formData = new FormData();
formData.append("file", data);
-
+ setFolderDragging(false);
+ setFolderIdDragging("");
uploadFlowsFromFolders(formData).then(() => {
getFoldersApi(true);
triggerFolderChange(folderId);
- refreshFlows();
});
};
diff --git a/src/frontend/src/components/sidebarComponent/index.tsx b/src/frontend/src/components/sidebarComponent/index.tsx
index 63fec9661..396373705 100644
--- a/src/frontend/src/components/sidebarComponent/index.tsx
+++ b/src/frontend/src/components/sidebarComponent/index.tsx
@@ -5,6 +5,9 @@ import { cn } from "../../utils/utils";
import HorizontalScrollFadeComponent from "../horizontalScrollFadeComponent";
import SideBarButtonsComponent from "./components/sideBarButtons";
import SideBarFoldersButtonsComponent from "./components/sideBarFolderButtons";
+import { addFolder } from "../../pages/MainPage/services";
+import { useNavigate } from "react-router-dom";
+import useFlowStore from "../../stores/flowStore";
type SidebarNavProps = {
items: {
@@ -22,7 +25,6 @@ type SidebarNavProps = {
export default function SidebarNav({
className,
items,
- handleOpenNewFolderModal,
handleChangeFolder,
handleEditFolder,
handleDeleteFolder,
@@ -39,11 +41,7 @@ export default function SidebarNav({
return (
-
+
{!loadingFolders && folders?.length > 0 && isFolderPath && (
)}
diff --git a/src/frontend/src/components/tableComponent/index.tsx b/src/frontend/src/components/tableComponent/index.tsx
index 5f7c2ecdb..6113316be 100644
--- a/src/frontend/src/components/tableComponent/index.tsx
+++ b/src/frontend/src/components/tableComponent/index.tsx
@@ -32,51 +32,6 @@ const TableComponent = forwardRef<
ref,
) => {
const dark = useDarkStore((state) => state.dark);
- var currentRowHeight: number;
- var minRowHeight = 25;
-
- const getRowHeight = useCallback(() => {
- return currentRowHeight;
- }, []);
-
- const onGridReady = useCallback((params: any) => {
- minRowHeight = params.api.getSizesForCurrentTheme().rowHeight;
- currentRowHeight = minRowHeight;
- }, []);
-
- const updateRowHeight = (params: { api: any }) => {
- const bodyViewport = document.querySelector(".ag-body-viewport");
- if (!bodyViewport) {
- return;
- }
- var gridHeight = bodyViewport.clientHeight;
- var renderedRowCount = params.api.getDisplayedRowCount();
-
- if (renderedRowCount * minRowHeight >= gridHeight) {
- if (currentRowHeight !== minRowHeight) {
- currentRowHeight = minRowHeight;
- params.api.resetRowHeights();
- }
- } else {
- currentRowHeight = Math.floor(gridHeight / renderedRowCount);
- params.api.resetRowHeights();
- }
- };
-
- const onFirstDataRendered = useCallback(
- (params: any) => {
- updateRowHeight(params);
- },
- [updateRowHeight],
- );
-
- const onGridSizeChanged = useCallback(
- (params: any) => {
- updateRowHeight(params);
- },
- [updateRowHeight],
- );
-
if (props.rowData.length === 0) {
return (
@@ -102,10 +57,6 @@ const TableComponent = forwardRef<
{
const lastUrl = localStorage.getItem("lastUrlCalled");
+ const lastMethodCalled = localStorage.getItem("lastMethodCalled");
+
+ const isContained = AUTHORIZED_DUPLICATE_REQUESTS.some((request) =>
+ config?.url!.includes(request),
+ );
if (
config?.url === lastUrl &&
- config?.url !== "/health" &&
- config?.method === "get"
+ !isContained &&
+ lastMethodCalled === config.method
) {
return Promise.reject("Duplicate request");
}
localStorage.setItem("lastUrlCalled", config.url ?? "");
+ localStorage.setItem("lastMethodCalled", config.method ?? "");
+ localStorage.setItem(
+ "lastRequestData",
+ JSON.stringify(config.data) ?? "",
+ );
const accessToken = cookies.get("access_token_lf");
if (accessToken && !isAuthorizedURL(config?.url)) {
diff --git a/src/frontend/src/customNodes/genericNode/components/parameterComponent/index.tsx b/src/frontend/src/customNodes/genericNode/components/parameterComponent/index.tsx
index c87c97741..1b39e25bd 100644
--- a/src/frontend/src/customNodes/genericNode/components/parameterComponent/index.tsx
+++ b/src/frontend/src/customNodes/genericNode/components/parameterComponent/index.tsx
@@ -173,8 +173,6 @@ export default function ParameterComponent({
renderTooltips();
}, [tooltipTitle, flow]);
- console.log(left === true && type === "dict");
-
return !showNode ? (
left && LANGFLOW_SUPPORTED_TYPES.has(type ?? "") && !optionalHandle ? (
<>>
diff --git a/src/frontend/src/customNodes/utils/get-field-title.tsx b/src/frontend/src/customNodes/utils/get-field-title.tsx
index 93db719df..e448c4f01 100644
--- a/src/frontend/src/customNodes/utils/get-field-title.tsx
+++ b/src/frontend/src/customNodes/utils/get-field-title.tsx
@@ -1,10 +1,10 @@
import { APITemplateType } from "../../types/api";
export default function getFieldTitle(
- template: APITemplateType,
- templateField: string,
- ): string {
- return template[templateField].display_name
- ? template[templateField].display_name!
- : template[templateField].name ?? templateField;
- }
+ template: APITemplateType,
+ templateField: string,
+): string {
+ return template[templateField].display_name
+ ? template[templateField].display_name!
+ : template[templateField].name ?? templateField;
+}
diff --git a/src/frontend/src/customNodes/utils/sort-fields.tsx b/src/frontend/src/customNodes/utils/sort-fields.tsx
index d4dc473e5..b432d57ed 100644
--- a/src/frontend/src/customNodes/utils/sort-fields.tsx
+++ b/src/frontend/src/customNodes/utils/sort-fields.tsx
@@ -1,40 +1,40 @@
import { priorityFields } from "../../constants/constants";
export default function sortFields(a, b, fieldOrder) {
- // Early return for empty fields
- if (!a && !b) return 0;
- if (!a) return 1;
- if (!b) return -1;
-
- // Normalize the case to ensure case-insensitive comparison
- const normalizedFieldA = a.toLowerCase();
- const normalizedFieldB = b.toLowerCase();
-
- const aIsPriority = priorityFields.has(normalizedFieldA);
- const bIsPriority = priorityFields.has(normalizedFieldB);
-
- // Sort by priority
- if (aIsPriority && !bIsPriority) return -1;
- if (!aIsPriority && bIsPriority) return 1;
-
- // Check if either field is in the fieldOrder array
- const indexOfA = fieldOrder.indexOf(normalizedFieldA);
- const indexOfB = fieldOrder.indexOf(normalizedFieldB);
-
- // If both fields are in fieldOrder, sort by their order in the array
- if (indexOfA !== -1 && indexOfB !== -1) {
- return indexOfA - indexOfB;
- }
-
- // If only one of the fields is in fieldOrder, that field comes first
- if (indexOfA !== -1) {
- return -1;
- }
- if (indexOfB !== -1) {
- return 1;
- }
-
- // Default case for fields not in priorityFields and not found in fieldOrder
- // You might want to sort them alphabetically or in another specific manner
- return a.localeCompare(b);
+ // Early return for empty fields
+ if (!a && !b) return 0;
+ if (!a) return 1;
+ if (!b) return -1;
+
+ // Normalize the case to ensure case-insensitive comparison
+ const normalizedFieldA = a.toLowerCase();
+ const normalizedFieldB = b.toLowerCase();
+
+ const aIsPriority = priorityFields.has(normalizedFieldA);
+ const bIsPriority = priorityFields.has(normalizedFieldB);
+
+ // Sort by priority
+ if (aIsPriority && !bIsPriority) return -1;
+ if (!aIsPriority && bIsPriority) return 1;
+
+ // Check if either field is in the fieldOrder array
+ const indexOfA = fieldOrder.indexOf(normalizedFieldA);
+ const indexOfB = fieldOrder.indexOf(normalizedFieldB);
+
+ // If both fields are in fieldOrder, sort by their order in the array
+ if (indexOfA !== -1 && indexOfB !== -1) {
+ return indexOfA - indexOfB;
}
+
+ // If only one of the fields is in fieldOrder, that field comes first
+ if (indexOfA !== -1) {
+ return -1;
+ }
+ if (indexOfB !== -1) {
+ return 1;
+ }
+
+ // Default case for fields not in priorityFields and not found in fieldOrder
+ // You might want to sort them alphabetically or in another specific manner
+ return a.localeCompare(b);
+}
diff --git a/src/frontend/src/hooks/use-track-last-visited-path.tsx b/src/frontend/src/hooks/use-track-last-visited-path.tsx
new file mode 100644
index 000000000..fa5990800
--- /dev/null
+++ b/src/frontend/src/hooks/use-track-last-visited-path.tsx
@@ -0,0 +1,14 @@
+import { useEffect } from "react";
+import { useLocation } from "react-router-dom";
+import { useLocationStore } from "../stores/locationStore";
+
+function useTrackLastVisitedPath() {
+ const location = useLocation();
+ const setHistory = useLocationStore((state) => state.setRouteHistory);
+
+ useEffect(() => {
+ setHistory(location.pathname);
+ }, [location]);
+}
+
+export default useTrackLastVisitedPath;
diff --git a/src/frontend/src/modals/apiModal/utils/get-curl-code.tsx b/src/frontend/src/modals/apiModal/utils/get-curl-code.tsx
index 9ae8ec48f..b0a7f098d 100644
--- a/src/frontend/src/modals/apiModal/utils/get-curl-code.tsx
+++ b/src/frontend/src/modals/apiModal/utils/get-curl-code.tsx
@@ -5,13 +5,13 @@
* @returns {string} - The curl code
*/
export default function getCurlCode(
- flowId: string,
- isAuth: boolean,
- tweaksBuildedObject,
- ): string {
- const tweaksObject = tweaksBuildedObject[0];
-
- return `curl -X POST \\
+ flowId: string,
+ isAuth: boolean,
+ tweaksBuildedObject,
+): string {
+ const tweaksObject = tweaksBuildedObject[0];
+
+ return `curl -X POST \\
${window.location.protocol}//${
window.location.host
}/api/v1/run/${flowId}?stream=false \\
@@ -23,4 +23,4 @@ export default function getCurlCode(
"input_type": "chat",
"tweaks": ${JSON.stringify(tweaksObject, null, 2)}'
`;
- }
+}
diff --git a/src/frontend/src/modals/apiModal/utils/get-python-api-code.tsx b/src/frontend/src/modals/apiModal/utils/get-python-api-code.tsx
index 207ae2d48..b77ff3c5e 100644
--- a/src/frontend/src/modals/apiModal/utils/get-python-api-code.tsx
+++ b/src/frontend/src/modals/apiModal/utils/get-python-api-code.tsx
@@ -6,20 +6,20 @@
* @returns {string} - The python code
*/
export default function getPythonApiCode(
- flowId: string,
- isAuth: boolean,
- tweaksBuildedObject,
- ): string {
- const tweaksObject = tweaksBuildedObject[0];
- return `import requests
+ flowId: string,
+ isAuth: boolean,
+ tweaksBuildedObject,
+): string {
+ const tweaksObject = tweaksBuildedObject[0];
+ return `import requests
from typing import Optional
-
+
BASE_API_URL = "${window.location.protocol}//${window.location.host}/api/v1/run"
FLOW_ID = "${flowId}"
# You can tweak the flow by adding a tweaks dictionary
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
TWEAKS = ${JSON.stringify(tweaksObject, null, 2)}
-
+
def run_flow(message: str,
flow_id: str,
output_type: str = "chat",
@@ -28,14 +28,14 @@ export default function getPythonApiCode(
api_key: Optional[str] = None) -> dict:
"""
Run a flow with a given message and optional tweaks.
-
+
:param message: The message to send to the flow
:param flow_id: The ID of the flow to run
:param tweaks: Optional tweaks to customize the flow
:return: The JSON response from the flow
"""
api_url = f"{BASE_API_URL}/{flow_id}"
-
+
payload = {
"input_value": message,
"output_type": output_type,
@@ -48,11 +48,11 @@ export default function getPythonApiCode(
headers = {"x-api-key": api_key}
response = requests.post(api_url, json=payload, headers=headers)
return response.json()
-
+
# Setup any tweaks you want to apply to the flow
message = "message"
${!isAuth ? `api_key = ""` : ""}
print(run_flow(message=message, flow_id=FLOW_ID, tweaks=TWEAKS${
- !isAuth ? `, api_key=api_key` : ""
- }))`;
- }
\ No newline at end of file
+ !isAuth ? `, api_key=api_key` : ""
+ }))`;
+}
diff --git a/src/frontend/src/modals/apiModal/utils/get-python-code.tsx b/src/frontend/src/modals/apiModal/utils/get-python-code.tsx
index d5327c3e7..9e1ad8052 100644
--- a/src/frontend/src/modals/apiModal/utils/get-python-code.tsx
+++ b/src/frontend/src/modals/apiModal/utils/get-python-code.tsx
@@ -4,14 +4,17 @@
* @param {any[]} tweak - The tweaks
* @returns {string} - The python code
*/
-export default function getPythonCode(flowName: string, tweaksBuildedObject): string {
- const tweaksObject = tweaksBuildedObject[0];
-
- return `from langflow.load import run_flow_from_json
+export default function getPythonCode(
+ flowName: string,
+ tweaksBuildedObject,
+): string {
+ const tweaksObject = tweaksBuildedObject[0];
+
+ return `from langflow.load import run_flow_from_json
TWEAKS = ${JSON.stringify(tweaksObject, null, 2)}
-
+
result = run_flow_from_json(flow="${flowName}.json",
input_value="message",
fallback_to_env_vars=True, # False by default
tweaks=TWEAKS)`;
- }
+}
diff --git a/src/frontend/src/modals/apiModal/utils/get-widget-code.tsx b/src/frontend/src/modals/apiModal/utils/get-widget-code.tsx
index a44cba757..0d3d02f91 100644
--- a/src/frontend/src/modals/apiModal/utils/get-widget-code.tsx
+++ b/src/frontend/src/modals/apiModal/utils/get-widget-code.tsx
@@ -4,12 +4,12 @@
* @returns {string} - The widget code
*/
export default function getWidgetCode(
- flowId: string,
- flowName: string,
- isAuth: boolean,
- ): string {
- return `
-
+ flowId: string,
+ flowName: string,
+ isAuth: boolean,
+): string {
+ return `
+
`;
- }
+}
diff --git a/src/frontend/src/modals/apiModal/utils/tabs-array.tsx b/src/frontend/src/modals/apiModal/utils/tabs-array.tsx
index 434086b88..d3a675597 100644
--- a/src/frontend/src/modals/apiModal/utils/tabs-array.tsx
+++ b/src/frontend/src/modals/apiModal/utils/tabs-array.tsx
@@ -1,40 +1,6 @@
export default function tabsArray(codes: string[], method: number) {
- if (!method) return;
- if (method === 0) {
- return [
- {
- name: "cURL",
- mode: "bash",
- image: "https://curl.se/logo/curl-symbol-transparent.png",
- language: "sh",
- code: codes[0],
- },
- {
- name: "Python API",
- mode: "python",
- image:
- "https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w",
- language: "py",
- code: codes[1],
- },
- {
- name: "Python Code",
- mode: "python",
- image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
- language: "py",
- code: codes[2],
- },
- {
- name: "Chat Widget HTML",
- description:
- "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation .",
- mode: "html",
- image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
- language: "py",
- code: codes[3],
- },
- ];
- }
+ if (!method) return;
+ if (method === 0) {
return [
{
name: "cURL",
@@ -54,8 +20,8 @@ export default function tabsArray(codes: string[], method: number) {
{
name: "Python Code",
mode: "python",
- language: "py",
image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
+ language: "py",
code: codes[2],
},
{
@@ -67,12 +33,46 @@ export default function tabsArray(codes: string[], method: number) {
language: "py",
code: codes[3],
},
- {
- name: "Tweaks",
- mode: "python",
- image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
- language: "py",
- code: codes[4],
- },
];
}
+ return [
+ {
+ name: "cURL",
+ mode: "bash",
+ image: "https://curl.se/logo/curl-symbol-transparent.png",
+ language: "sh",
+ code: codes[0],
+ },
+ {
+ name: "Python API",
+ mode: "python",
+ image:
+ "https://images.squarespace-cdn.com/content/v1/5df3d8c5d2be5962e4f87890/1628015119369-OY4TV3XJJ53ECO0W2OLQ/Python+API+Training+Logo.png?format=1000w",
+ language: "py",
+ code: codes[1],
+ },
+ {
+ name: "Python Code",
+ mode: "python",
+ language: "py",
+ image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
+ code: codes[2],
+ },
+ {
+ name: "Chat Widget HTML",
+ description:
+ "Insert this code anywhere in your <body> tag. To use with react and other libs, check our documentation .",
+ mode: "html",
+ image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
+ language: "py",
+ code: codes[3],
+ },
+ {
+ name: "Tweaks",
+ mode: "python",
+ image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png",
+ language: "py",
+ code: codes[4],
+ },
+ ];
+}
diff --git a/src/frontend/src/modals/flowLogsModal/index.tsx b/src/frontend/src/modals/flowLogsModal/index.tsx
index 278c6f77e..70a8802c2 100644
--- a/src/frontend/src/modals/flowLogsModal/index.tsx
+++ b/src/frontend/src/modals/flowLogsModal/index.tsx
@@ -1,9 +1,9 @@
-import { useEffect, useState } from "react";
+import { useEffect, useRef, useState } from "react";
import IconComponent from "../../components/genericIconComponent";
import { Tabs, TabsList, TabsTrigger } from "../../components/ui/tabs";
import useFlowsManagerStore from "../../stores/flowsManagerStore";
import { FlowSettingsPropsType } from "../../types/components";
-import { FlowType } from "../../types/flow";
+import { FlowType, NodeDataType } from "../../types/flow";
import BaseModal from "../baseModal";
import TableComponent from "../../components/tableComponent";
import { getMessagesTable, getTransactionTable } from "../../controllers/API";
@@ -12,15 +12,20 @@ import {
ColGroupDef,
SizeColumnsToFitGridStrategy,
} from "ag-grid-community";
+import useAlertStore from "../../stores/alertStore";
+import useFlowStore from "../../stores/flowStore";
export default function FlowLogsModal({
open,
setOpen,
}: FlowSettingsPropsType): JSX.Element {
const saveFlow = useFlowsManagerStore((state) => state.saveFlow);
+ const nodes = useFlowStore((state) => state.nodes);
const currentFlow = useFlowsManagerStore((state) => state.currentFlow);
const currentFlowId = useFlowsManagerStore((state) => state.currentFlowId);
const flows = useFlowsManagerStore((state) => state.flows);
+ const setNoticeData = useAlertStore((state) => state.setNoticeData);
+
useEffect(() => {
setName(currentFlow!.name);
setDescription(currentFlow!.description);
@@ -31,6 +36,7 @@ export default function FlowLogsModal({
const [columns, setColumns] = useState>([]);
const [rows, setRows] = useState([]);
const [activeTab, setActiveTab] = useState("Executions");
+ const noticed = useRef(false);
function handleClick(): void {
currentFlow!.name = name;
@@ -53,6 +59,25 @@ export default function FlowLogsModal({
setRows(rows);
});
}
+
+ if (open && activeTab === "Messages" && !noticed.current) {
+ const haStream = nodes
+ .map((nodes) => (nodes.data as NodeDataType).node!.template)
+ .some((template) => template["stream"] && template["stream"].value);
+ console.log(
+ haStream,
+ nodes.map((nodes) => (nodes.data as NodeDataType).node!.template),
+ );
+ if (haStream) {
+ setNoticeData({
+ title: "Streamed messages will not appear in this table.",
+ });
+ noticed.current = true;
+ }
+ }
+ if (!open) {
+ noticed.current = false;
+ }
}, [open, activeTab]);
const [nameLists, setNameList] = useState([]);
diff --git a/src/frontend/src/modals/shareModal/index.tsx b/src/frontend/src/modals/shareModal/index.tsx
index b6c5a2aed..a2c2f88d0 100644
--- a/src/frontend/src/modals/shareModal/index.tsx
+++ b/src/frontend/src/modals/shareModal/index.tsx
@@ -129,14 +129,14 @@ export default function ShareModal({
title: "Error sharing " + is_component ? "component" : "flow",
list: [err["response"]["data"]["detail"]],
});
- }
+ },
);
else
updateFlowStore(
flow!,
getTagsIds(selectedTags, tags),
sharePublic,
- unavaliableNames.find((e) => e.name === name)!.id
+ unavaliableNames.find((e) => e.name === name)!.id,
).then(successShare, (err) => {
setErrorData({
title: "Error sharing " + is_component ? "component" : "flow",
@@ -287,7 +287,7 @@ export default function ShareModal({
className={is_component ? "w-40" : "w-28"}
onClick={() => {
const isNameAvailable = !unavaliableNames.some(
- (element) => element.name === name
+ (element) => element.name === name,
);
if (isNameAvailable) {
diff --git a/src/frontend/src/modals/shareModal/utils/get-tags-ids.tsx b/src/frontend/src/modals/shareModal/utils/get-tags-ids.tsx
index 9bb71e20e..364d310a5 100644
--- a/src/frontend/src/modals/shareModal/utils/get-tags-ids.tsx
+++ b/src/frontend/src/modals/shareModal/utils/get-tags-ids.tsx
@@ -1,8 +1,8 @@
export default function getTagsIds(
- tags: string[],
- tagListId: { name: string; id: string }[],
- ) {
- return tags
- .map((tag) => tagListId.find((tagObj) => tagObj.name === tag))!
- .map((tag) => tag!.id);
+ tags: string[],
+ tagListId: { name: string; id: string }[],
+) {
+ return tags
+ .map((tag) => tagListId.find((tagObj) => tagObj.name === tag))!
+ .map((tag) => tag!.id);
}
diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/utils/get-random-name.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/utils/get-random-name.tsx
index d6213bcc0..d3bcf4d55 100644
--- a/src/frontend/src/pages/FlowPage/components/PageComponent/utils/get-random-name.tsx
+++ b/src/frontend/src/pages/FlowPage/components/PageComponent/utils/get-random-name.tsx
@@ -3,33 +3,33 @@ import { getRandomElement } from "../../../../../utils/reactflowUtils";
import { toTitleCase } from "../../../../../utils/utils";
export default function getRandomName(
- retry: number = 0,
- noSpace: boolean = false,
- maxRetries: number = 3,
- ): string {
- const left: string[] = ADJECTIVES;
- const right: string[] = NOUNS;
-
- const lv = getRandomElement(left);
- const rv = getRandomElement(right);
-
- // Condition to avoid "boring wozniak"
- if (lv === "boring" && rv === "wozniak") {
- if (retry < maxRetries) {
- return getRandomName(retry + 1, noSpace, maxRetries);
- } else {
- console.warn("Max retries reached, returning as is");
- }
+ retry: number = 0,
+ noSpace: boolean = false,
+ maxRetries: number = 3,
+): string {
+ const left: string[] = ADJECTIVES;
+ const right: string[] = NOUNS;
+
+ const lv = getRandomElement(left);
+ const rv = getRandomElement(right);
+
+ // Condition to avoid "boring wozniak"
+ if (lv === "boring" && rv === "wozniak") {
+ if (retry < maxRetries) {
+ return getRandomName(retry + 1, noSpace, maxRetries);
+ } else {
+ console.warn("Max retries reached, returning as is");
}
-
- // Append a suffix if retrying and noSpace is true
- if (retry > 0 && noSpace) {
- const retrySuffix = Math.floor(Math.random() * 10);
- return `${lv}_${rv}${retrySuffix}`;
- }
-
- // Construct the final name
- let final_name = noSpace ? `${lv}_${rv}` : `${lv} ${rv}`;
- // Return title case final name
- return toTitleCase(final_name);
- }
\ No newline at end of file
+ }
+
+ // Append a suffix if retrying and noSpace is true
+ if (retry > 0 && noSpace) {
+ const retrySuffix = Math.floor(Math.random() * 10);
+ return `${lv}_${rv}${retrySuffix}`;
+ }
+
+ // Construct the final name
+ let final_name = noSpace ? `${lv}_${rv}` : `${lv} ${rv}`;
+ // Return title case final name
+ return toTitleCase(final_name);
+}
diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
index 9ec1031c2..f8127014e 100644
--- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
+++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
@@ -19,10 +19,7 @@ import {
nodeIconsLucide,
nodeNames,
} from "../../../../utils/styleUtils";
-import {
- classNames,
- removeCountFromString,
-} from "../../../../utils/utils";
+import { classNames, removeCountFromString } from "../../../../utils/utils";
import DisclosureComponent from "../DisclosureComponent";
import ParentDisclosureComponent from "../ParentDisclosureComponent";
import SidebarDraggableComponent from "./sideBarDraggableComponent";
@@ -45,7 +42,7 @@ export default function ExtraSidebar(): JSX.Element {
const [search, setSearch] = useState("");
function onDragStart(
event: React.DragEvent,
- data: { type: string; node?: APIClassType }
+ data: { type: string; node?: APIClassType },
): void {
//start drag event
var crt = event.currentTarget.cloneNode(true);
@@ -71,7 +68,7 @@ export default function ExtraSidebar(): JSX.Element {
let keys = Object.keys(data[d]).filter(
(nd) =>
nd.toLowerCase().includes(e.toLowerCase()) ||
- data[d][nd].display_name?.toLowerCase().includes(e.toLowerCase())
+ data[d][nd].display_name?.toLowerCase().includes(e.toLowerCase()),
);
keys.forEach((element) => {
ret[d][element] = data[d][element];
@@ -138,7 +135,7 @@ export default function ExtraSidebar(): JSX.Element {
if (filtered.some((x) => x !== "")) {
let keys = Object.keys(dataClone[d]).filter((nd) =>
- filtered.includes(nd)
+ filtered.includes(nd),
);
Object.keys(dataClone[d]).forEach((element) => {
if (!keys.includes(element)) {
@@ -175,7 +172,7 @@ export default function ExtraSidebar(): JSX.Element {
if (filtered.some((x) => x !== "")) {
let keys = Object.keys(dataClone[d]).filter((nd) =>
- filtered.includes(nd)
+ filtered.includes(nd),
);
Object.keys(dataClone[d]).forEach((element) => {
if (!keys.includes(element)) {
@@ -204,7 +201,7 @@ export default function ExtraSidebar(): JSX.Element {
"extra-side-bar-buttons gap-[4px] text-sm font-semibold",
!hasApiKey || !validApiKey || !hasStore
? "button-disable cursor-default text-muted-foreground"
- : ""
+ : "",
)}
>
Share
),
- [hasApiKey, validApiKey, currentFlow, hasStore]
+ [hasApiKey, validApiKey, currentFlow, hasStore],
);
const ExportMemo = useMemo(
@@ -231,7 +228,7 @@ export default function ExtraSidebar(): JSX.Element {
),
- []
+ [],
);
const getIcon = useMemo(() => {
@@ -315,8 +312,8 @@ export default function ExtraSidebar(): JSX.Element {
.sort((a, b) =>
sensitiveSort(
dataFilter[SBSectionName][a].display_name,
- dataFilter[SBSectionName][b].display_name
- )
+ dataFilter[SBSectionName][b].display_name,
+ ),
)
.map((SBItemName: string, index) => (
) : (
- )
+ ),
)}{" "}
sensitiveSort(
dataFilter[SBSectionName][a].display_name,
- dataFilter[SBSectionName][b].display_name
- )
+ dataFilter[SBSectionName][b].display_name,
+ ),
)
.map((SBItemName: string, index) => (
) : (
- )
+ ),
)}
diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils/sensitive-sort.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils/sensitive-sort.tsx
index c0e7bd265..97c6fe1bb 100644
--- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils/sensitive-sort.tsx
+++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils/sensitive-sort.tsx
@@ -1,25 +1,24 @@
export default function sensitiveSort(a: string, b: string): number {
- // Extract the name and number from each string using regular expressions
- const regex = /(.+) \((\w+)\)/;
- const matchA = a.match(regex);
- const matchB = b.match(regex);
-
- if (matchA && matchB) {
- // Compare the names alphabetically
- const nameA = matchA[1];
- const nameB = matchB[1];
- if (nameA !== nameB) {
- return nameA.localeCompare(nameB);
- }
-
- // If the names are the same, compare the numbers numerically
- const numberA = parseInt(matchA[2]);
- const numberB = parseInt(matchB[2]);
- return numberA - numberB;
- } else {
- // Handle cases where one or both strings do not match the expected pattern
- // Simple strings are treated as pure alphabetical comparisons
- return a.localeCompare(b);
+ // Extract the name and number from each string using regular expressions
+ const regex = /(.+) \((\w+)\)/;
+ const matchA = a.match(regex);
+ const matchB = b.match(regex);
+
+ if (matchA && matchB) {
+ // Compare the names alphabetically
+ const nameA = matchA[1];
+ const nameB = matchB[1];
+ if (nameA !== nameB) {
+ return nameA.localeCompare(nameB);
}
+
+ // If the names are the same, compare the numbers numerically
+ const numberA = parseInt(matchA[2]);
+ const numberB = parseInt(matchB[2]);
+ return numberA - numberB;
+ } else {
+ // Handle cases where one or both strings do not match the expected pattern
+ // Simple strings are treated as pure alphabetical comparisons
+ return a.localeCompare(b);
+ }
}
-
\ No newline at end of file
diff --git a/src/frontend/src/pages/FlowPage/index.tsx b/src/frontend/src/pages/FlowPage/index.tsx
index 886ab5150..49fdb1eca 100644
--- a/src/frontend/src/pages/FlowPage/index.tsx
+++ b/src/frontend/src/pages/FlowPage/index.tsx
@@ -1,5 +1,5 @@
import { useEffect } from "react";
-import { useParams } from "react-router-dom";
+import { useNavigate, useParams } from "react-router-dom";
import FlowToolbar from "../../components/chatComponent";
import Header from "../../components/headerComponent";
import { useDarkStore } from "../../stores/darkStore";
@@ -16,9 +16,19 @@ export default function FlowPage({ view }: { view?: boolean }): JSX.Element {
const setOnFlowPage = useFlowStore((state) => state.setOnFlowPage);
const currentFlow = useFlowsManagerStore((state) => state.currentFlow);
const { id } = useParams();
+ const navigate = useNavigate();
+
+ const flows = useFlowsManagerStore((state) => state.flows);
// Set flow tab id
useEffect(() => {
+ const isAnExistingFlow = flows.some((flow) => flow.id === id);
+
+ if (!isAnExistingFlow) {
+ navigate("/all");
+ return;
+ }
+
setCurrentFlowId(id!);
setOnFlowPage(true);
diff --git a/src/frontend/src/pages/MainPage/entities/index.tsx b/src/frontend/src/pages/MainPage/entities/index.tsx
index 74c3c773f..8c46764ff 100644
--- a/src/frontend/src/pages/MainPage/entities/index.tsx
+++ b/src/frontend/src/pages/MainPage/entities/index.tsx
@@ -6,13 +6,14 @@ export type FolderType = {
id?: string | null;
parent_id: string;
flows: FlowType[];
+ components: string[];
};
export type AddFolderType = {
name: string;
description: string;
id?: string | null;
- parent_id: string;
+ parent_id: string | null;
flows?: string[];
components?: string[];
};
diff --git a/src/frontend/src/pages/MainPage/services/index.ts b/src/frontend/src/pages/MainPage/services/index.ts
index a554cd1d9..3e1286d5e 100644
--- a/src/frontend/src/pages/MainPage/services/index.ts
+++ b/src/frontend/src/pages/MainPage/services/index.ts
@@ -1,7 +1,7 @@
import { BASE_URL_API } from "../../../constants/constants";
import { api } from "../../../controllers/API/api";
import { FlowType } from "../../../types/flow";
-import { AddFolderType, FolderType, StarterProjectsType } from "../entities";
+import { AddFolderType, FolderType } from "../entities";
export async function getFolders(): Promise {
try {
@@ -61,15 +61,6 @@ export async function getFolderById(folderId: string): Promise {
}
}
-export async function getStarterProjects(): Promise {
- try {
- const response = await api.get(`${BASE_URL_API}folders/starter-projects`);
- return response?.data;
- } catch (error) {
- throw error;
- }
-}
-
export async function downloadFlowsFromFolders(folderId: string): Promise<{
flows: FlowType[];
folder_name: string;
diff --git a/src/frontend/src/stores/darkStore.tsx b/src/frontend/src/stores/darkStore.ts
similarity index 97%
rename from src/frontend/src/stores/darkStore.tsx
rename to src/frontend/src/stores/darkStore.ts
index 885c914c4..51a9edd0a 100644
--- a/src/frontend/src/stores/darkStore.tsx
+++ b/src/frontend/src/stores/darkStore.ts
@@ -36,7 +36,7 @@ export const useDarkStore = create((set, get) => ({
window.localStorage.setItem("githubStars", res.toString());
window.localStorage.setItem(
"githubStarsLastUpdated",
- new Date().toString()
+ new Date().toString(),
);
set(() => ({ stars: res, lastUpdated: new Date() }));
});
diff --git a/src/frontend/src/stores/flowsManagerStore.ts b/src/frontend/src/stores/flowsManagerStore.ts
index 052ec749e..81637fba0 100644
--- a/src/frontend/src/stores/flowsManagerStore.ts
+++ b/src/frontend/src/stores/flowsManagerStore.ts
@@ -11,7 +11,6 @@ import {
updateFlowInDatabase,
uploadFlowsToDatabase,
} from "../controllers/API";
-import { getStarterProjects } from "../pages/MainPage/services";
import { FlowType, NodeDataType } from "../types/flow";
import {
FlowsManagerStoreType,
@@ -81,38 +80,40 @@ const useFlowsManagerStore = create((set, get) => ({
return new Promise((resolve, reject) => {
set({ isLoading: true });
- getStarterProjects().then((starterProjects) => {
- get().setExamples(starterProjects?.flows!);
+ const starterFolderId = useFolderStore.getState().starterProjectId;
- readFlowsFromDatabase()
- .then((dbData) => {
- if (dbData) {
- const { data, flows } = processFlows(dbData, false);
- const starterProjectsIds = starterProjects.flows!.map(
- (flow) => flow.id,
- );
- get().setFlows(
- flows.filter((f) => !starterProjectsIds.includes(f.id)),
- );
- useTypesStore.setState((state) => ({
- data: { ...state.data, ["saved_components"]: data },
- ComponentFields: extractFieldsFromComponenents({
- ...state.data,
- ["saved_components"]: data,
- }),
- }));
- set({ isLoading: false });
- resolve();
- }
- })
- .catch((e) => {
+ readFlowsFromDatabase()
+ .then((dbData) => {
+ if (dbData) {
+ const { data, flows } = processFlows(dbData, false);
+ const examples = flows.filter(
+ (flow) => flow.folder_id === starterFolderId,
+ );
+ get().setExamples(examples);
+
+ const flowsWithoutStarterFolder = flows.filter(
+ (flow) => flow.folder_id !== starterFolderId,
+ );
+
+ get().setFlows(flowsWithoutStarterFolder);
+ useTypesStore.setState((state) => ({
+ data: { ...state.data, ["saved_components"]: data },
+ ComponentFields: extractFieldsFromComponenents({
+ ...state.data,
+ ["saved_components"]: data,
+ }),
+ }));
set({ isLoading: false });
- useAlertStore.getState().setErrorData({
- title: "Could not load flows from database",
- });
- reject(e);
+ resolve();
+ }
+ })
+ .catch((e) => {
+ set({ isLoading: false });
+ useAlertStore.getState().setErrorData({
+ title: "Could not load flows from database",
});
- });
+ reject(e);
+ });
});
},
autoSaveCurrentFlow: (nodes: Node[], edges: Edge[], viewport: Viewport) => {
@@ -204,11 +205,16 @@ const useFlowsManagerStore = create((set, get) => ({
: { nodes: [], edges: [], viewport: { zoom: 1, x: 0, y: 0 } };
// Create a new flow with a default name if no flow is provided.
+ const folder_id = useFolderStore.getState().folderUrl;
+ const my_collection_id = useFolderStore.getState().myCollectionId;
if (override) {
get().deleteComponent(flow!.name);
- const newFlow = createNewFlow(flowData!, flow!);
- newFlow.folder_id = useFolderStore.getState().folderUrl;
+ const newFlow = createNewFlow(
+ flowData!,
+ flow!,
+ folder_id || my_collection_id!,
+ );
const { id } = await saveFlowToDatabase(newFlow);
newFlow.id = id;
//setTimeout to prevent update state with wrong state
@@ -227,8 +233,12 @@ const useFlowsManagerStore = create((set, get) => ({
// addFlowToLocalState(newFlow);
return;
}
-
- const newFlow = createNewFlow(flowData!, flow!);
+ console.log("folder id", folder_id);
+ const newFlow = createNewFlow(
+ flowData!,
+ flow!,
+ folder_id || my_collection_id!,
+ );
const newName = addVersionToDuplicates(newFlow, get().flows);
diff --git a/src/frontend/src/stores/foldersStore.tsx b/src/frontend/src/stores/foldersStore.tsx
index 7d646a52a..9edfef135 100644
--- a/src/frontend/src/stores/foldersStore.tsx
+++ b/src/frontend/src/stores/foldersStore.tsx
@@ -1,5 +1,5 @@
import { create } from "zustand";
-import { DEFAULT_FOLDER } from "../constants/constants";
+import { DEFAULT_FOLDER, STARTER_FOLDER_NAME } from "../constants/constants";
import {
getFolderById,
getFolders,
@@ -11,24 +11,44 @@ import useFlowsManagerStore from "./flowsManagerStore";
export const useFolderStore = create((set, get) => ({
folders: [],
getFoldersApi: (refetch = false) => {
- if (get()?.folders.length === 0 || refetch === true) {
- get().setLoading(true);
- getFolders().then(
- (res) => {
- set({ folders: res });
- const myCollectionId = res?.find(
- (f) => f.name === DEFAULT_FOLDER,
- )?.id;
- set({ myCollectionId });
- get().setLoading(false);
- useFlowsManagerStore.getState().refreshFlows();
- },
- () => {
- set({ folders: [] });
- get().setLoading(false);
- },
- );
- }
+ return new Promise((resolve, reject) => {
+ if (get()?.folders.length === 0 || refetch === true) {
+ get().setLoading(true);
+ getFolders().then(
+ (res) => {
+ const foldersWithoutStarterProjects = res.filter(
+ (folder) => folder.name !== STARTER_FOLDER_NAME,
+ );
+
+ const starterProjects = res.find(
+ (folder) => folder.name === STARTER_FOLDER_NAME,
+ );
+
+ set({ starterProjectId: starterProjects!.id ?? "" });
+ set({ folders: foldersWithoutStarterProjects });
+
+ const myCollectionId = res?.find(
+ (f) => f.name === DEFAULT_FOLDER,
+ )?.id;
+
+ set({ myCollectionId });
+
+ if (refetch === true) {
+ useFlowsManagerStore.getState().refreshFlows();
+ useFlowsManagerStore.getState().setAllFlows;
+ }
+
+ get().setLoading(false);
+ resolve();
+ },
+ () => {
+ set({ folders: [] });
+ get().setLoading(false);
+ reject();
+ },
+ );
+ }
+ });
},
setFolders: (folders) => set(() => ({ folders: folders })),
loading: false,
@@ -93,12 +113,12 @@ export const useFolderStore = create((set, get) => ({
formData.append("file", file);
uploadFlowsFromFolders(formData).then(() => {
get().getFoldersApi(true);
- useFlowsManagerStore.getState().refreshFlows();
});
- useFlowsManagerStore.getState().setAllFlows;
}
};
input.click();
});
},
+ starterProjectId: "",
+ setStarterProjectId: (id) => set(() => ({ starterProjectId: id })),
}));
diff --git a/src/frontend/src/stores/locationStore.ts b/src/frontend/src/stores/locationStore.ts
new file mode 100644
index 000000000..8ef3554e0
--- /dev/null
+++ b/src/frontend/src/stores/locationStore.ts
@@ -0,0 +1,21 @@
+import { create } from "zustand";
+import { LocationStoreType } from "../types/zustand/location";
+
+export const useLocationStore = create((set, get) => ({
+ routeHistory: [],
+ setRouteHistory: (location) => {
+ let routeHistoryArray = get().routeHistory;
+ routeHistoryArray.push(location);
+
+ if (routeHistoryArray?.length > 9) {
+ routeHistoryArray.shift();
+ set({
+ routeHistory: routeHistoryArray,
+ });
+ }
+
+ set({
+ routeHistory: routeHistoryArray,
+ });
+ },
+}));
diff --git a/src/frontend/src/stores/storeStore.tsx b/src/frontend/src/stores/storeStore.ts
similarity index 100%
rename from src/frontend/src/stores/storeStore.tsx
rename to src/frontend/src/stores/storeStore.ts
diff --git a/src/frontend/src/types/components/index.ts b/src/frontend/src/types/components/index.ts
index 9d3d0f259..790a04d05 100644
--- a/src/frontend/src/types/components/index.ts
+++ b/src/frontend/src/types/components/index.ts
@@ -6,6 +6,7 @@ import { ChatMessageType } from "../chat";
import { FlowStyleType, FlowType, NodeDataType, NodeType } from "../flow/index";
import { sourceHandleType, targetHandleType } from "./../flow/index";
export type InputComponentType = {
+ name?: string;
autoFocus?: boolean;
onBlur?: (event: React.FocusEvent) => void;
value?: string;
@@ -32,6 +33,7 @@ export type InputComponentType = {
setSelectedOptions?: (value: string[]) => void;
objectOptions?: Array<{ name: string; id: string }>;
isObjectOption?: boolean;
+ onChangeFolderName?: (e: any) => void;
};
export type ToggleComponentType = {
enabled: boolean;
diff --git a/src/frontend/src/types/zustand/folders/index.ts b/src/frontend/src/types/zustand/folders/index.ts
index c63ed6b77..352be3baf 100644
--- a/src/frontend/src/types/zustand/folders/index.ts
+++ b/src/frontend/src/types/zustand/folders/index.ts
@@ -2,7 +2,7 @@ import { FolderType } from "../../../pages/MainPage/entities";
export type FoldersStoreType = {
folders: FolderType[];
- getFoldersApi: (refetch?: boolean) => void;
+ getFoldersApi: (refetch?: boolean) => Promise;
setFolders: (folders: FolderType[]) => void;
loading: boolean;
setLoading: (loading: boolean) => void;
@@ -23,4 +23,6 @@ export type FoldersStoreType = {
uploadFolder: (folderId: string) => void;
folderIdDragging: string;
setFolderIdDragging: (id: string) => void;
+ starterProjectId: string;
+ setStarterProjectId: (id: string) => void;
};
diff --git a/src/frontend/src/types/zustand/location/index.ts b/src/frontend/src/types/zustand/location/index.ts
new file mode 100644
index 000000000..d804019e6
--- /dev/null
+++ b/src/frontend/src/types/zustand/location/index.ts
@@ -0,0 +1,4 @@
+export type LocationStoreType = {
+ routeHistory: string[];
+ setRouteHistory: (location: string) => void;
+};
diff --git a/src/frontend/src/utils/buildUtils.ts b/src/frontend/src/utils/buildUtils.ts
index 6c0db6f8f..eab498c4c 100644
--- a/src/frontend/src/utils/buildUtils.ts
+++ b/src/frontend/src/utils/buildUtils.ts
@@ -10,14 +10,14 @@ import { VertexLayerElementType } from "../types/zustand/flow";
type BuildVerticesParams = {
flowId: string; // Assuming FlowType is the type for your flow
input_value?: any; // Replace any with the actual type if it's not any
- files?:string[];
+ files?: string[];
startNodeId?: string | null; // Assuming nodeId is of type string, and it's optional
stopNodeId?: string | null; // Assuming nodeId is of type string, and it's optional
onGetOrderSuccess?: () => void;
onBuildUpdate?: (
data: VertexBuildTypeAPI,
status: BuildStatus,
- buildId: string
+ buildId: string,
) => void; // Replace any with the actual type if it's not any
onBuildComplete?: (allNodesValid: boolean) => void;
onBuildError?: (title, list, idList: VertexLayerElementType[]) => void;
@@ -31,8 +31,8 @@ function getInactiveVertexData(vertexId: string): VertexBuildTypeAPI {
// Build VertexBuildTypeAPI
let inactiveData = {
results: {},
- logs:[],
- messages:[],
+ logs: [],
+ messages: [],
inactive: true,
};
let inactiveVertexData = {
@@ -55,7 +55,7 @@ export async function updateVerticesOrder(
startNodeId?: string | null,
stopNodeId?: string | null,
nodes?: Node[],
- edges?: Edge[]
+ edges?: Edge[],
): Promise<{
verticesLayers: VertexLayerElementType[][];
verticesIds: string[];
@@ -71,14 +71,13 @@ export async function updateVerticesOrder(
startNodeId,
stopNodeId,
nodes,
- edges
+ edges,
);
} catch (error: any) {
setErrorData({
title: "Oops! Looks like you missed something",
list: [error.response?.data?.detail ?? "Unknown Error"],
});
- debugger;
useFlowStore.getState().setIsBuilding(false);
throw new Error("Invalid nodes");
}
@@ -129,7 +128,7 @@ export async function buildVertices({
startNodeId,
stopNodeId,
nodes,
- edges
+ edges,
);
if (onValidateNodes) {
try {
@@ -191,14 +190,14 @@ export async function buildVertices({
onBuildUpdate(
getInactiveVertexData(element.id),
BuildStatus.INACTIVE,
- runId
+ runId,
);
}
if (element.reference) {
onBuildUpdate(
getInactiveVertexData(element.reference),
BuildStatus.INACTIVE,
- runId
+ runId,
);
}
buildResults.push(false);
@@ -224,7 +223,7 @@ export async function buildVertices({
if (stop) {
return;
}
- })
+ }),
);
// Once the current layer is built, move to the next layer
currentLayerIndex += 1;
@@ -253,7 +252,7 @@ async function buildVertex({
flowId: string;
id: string;
input_value: string;
- files?:string[];
+ files?: string[];
onBuildUpdate?: (data: any, status: BuildStatus) => void;
onBuildError?: (title, list, idList: VertexLayerElementType[]) => void;
verticesIds: string[];
@@ -261,7 +260,7 @@ async function buildVertex({
stopBuild: () => void;
}) {
try {
- const buildRes = await postBuildVertex(flowId, id, input_value,files);
+ const buildRes = await postBuildVertex(flowId, id, input_value, files);
const buildData: VertexBuildTypeAPI = buildRes.data;
if (onBuildUpdate) {
@@ -269,7 +268,7 @@ async function buildVertex({
onBuildError!(
"Error Building Component",
buildData.data.logs.map((log) => log.message),
- verticesIds.map((id) => ({ id }))
+ verticesIds.map((id) => ({ id })),
);
stopBuild();
}
@@ -281,7 +280,7 @@ async function buildVertex({
onBuildError!(
"Error Building Component",
[(error as AxiosError).response?.data?.detail ?? "Unknown Error"],
- verticesIds.map((id) => ({ id }))
+ verticesIds.map((id) => ({ id })),
);
stopBuild();
}
diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts
index eab2945ee..b379b445e 100644
--- a/src/frontend/src/utils/reactflowUtils.ts
+++ b/src/frontend/src/utils/reactflowUtils.ts
@@ -350,7 +350,12 @@ export function updateEdges(edges: Edge[]) {
}
export function addVersionToDuplicates(flow: FlowType, flows: FlowType[]) {
- const existingNames = flows.map((item) => item.name);
+ console.log("flow", flow);
+ console.log("flows", flows);
+ const existingNames = flows
+ .filter((f) => f.folder_id === flow.folder_id)
+ .map((item) => item.name);
+ console.log("existingNames", existingNames);
let newName = flow.name;
let count = 1;
@@ -1297,6 +1302,7 @@ export function getRandomDescription(): string {
export const createNewFlow = (
flowData: ReactFlowJsonObject,
flow: FlowType,
+ folderId: string,
) => {
return {
description: flow?.description ?? getRandomDescription(),
@@ -1304,7 +1310,7 @@ export const createNewFlow = (
data: flowData,
id: "",
is_component: flow?.is_component ?? false,
- folder_id: "",
+ folder_id: folderId,
};
};
diff --git a/src/frontend/tests/end-to-end/chatInputOutput.spec.ts b/src/frontend/tests/end-to-end/chatInputOutput.spec.ts
index b6dedc1bb..b5ab8ac42 100644
--- a/src/frontend/tests/end-to-end/chatInputOutput.spec.ts
+++ b/src/frontend/tests/end-to-end/chatInputOutput.spec.ts
@@ -1,101 +1,10 @@
import { expect, test } from "@playwright/test";
import { readFileSync } from "fs";
-test("user must interact with chat with Input/Output", async ({ page }) => {
- if (!process.env.CI) {
- dotenv.config();
- dotenv.config({ path: path.resolve(__dirname, "../../.env") });
- }
-
- await page.goto("/");
-
- await page.waitForTimeout(1000);
-
- let modalCount = 0;
- try {
- const modalTitleElement = await page?.getByTestId("modal-title");
- if (modalTitleElement) {
- modalCount = await modalTitleElement.count();
- }
- } catch (error) {
- modalCount = 0;
- }
-
- while (modalCount === 0) {
- await page.getByText("New Project", { exact: true }).click();
- await page.waitForTimeout(5000);
- modalCount = await page.getByTestId("modal-title")?.count();
- }
-
- await page.getByRole("heading", { name: "Basic Prompting" }).click();
- await page.waitForTimeout(1000);
-
- await page.getByTitle("fit view").click();
- await page.getByTitle("zoom out").click();
- await page.getByTitle("zoom out").click();
- await page.getByTitle("zoom out").click();
-
- if (!process.env.OPENAI_API_KEY) {
- //You must set the OPENAI_API_KEY on .env file to run this test
- expect(false).toBe(true);
- }
-
- await page
- .getByTestId("popover-anchor-input-openai_api_key")
- .fill(process.env.OPENAI_API_KEY ?? "");
- await page.getByText("Playground", { exact: true }).click();
- await page.getByPlaceholder("Send a message...").fill("Hello, how are you?");
- await page.getByTestId("icon-LucideSend").click();
- let valueUser = await page.getByTestId("sender_name_user").textContent();
- let valueAI = await page.getByTestId("sender_name_ai").textContent();
-
- expect(valueUser).toBe("User");
- expect(valueAI).toBe("AI");
-
- await page.keyboard.press("Escape");
-
- await page
- .getByTestId("textarea-input_value")
- .nth(1)
- .fill(
- "testtesttesttesttesttestte;.;.,;,.;,.;.,;,..,;;;;;;;;;;;;;;;;;;;;;,;.;,.;,.,;.,;.;.,~~çççççççççççççççççççççççççççççççççççççççisdajfdasiopjfaodisjhvoicxjiovjcxizopjviopasjioasfhjaiohf23432432432423423sttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttestççççççççççççççççççççççççççççççççç,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,!",
- );
- await page
- .getByTestId("popover-anchor-input-sender_name")
- .nth(1)
- .fill("TestSenderNameUser");
- await page
- .getByTestId("popover-anchor-input-sender_name")
- .nth(0)
- .fill("TestSenderNameAI");
-
- await page.getByText("Playground", { exact: true }).click();
- await page.getByTestId("icon-LucideSend").click();
-
- valueUser = await page
- .getByTestId("sender_name_testsendernameuser")
- .textContent();
- valueAI = await page
- .getByTestId("sender_name_testsendernameai")
- .textContent();
-
- expect(valueUser).toBe("TestSenderNameUser");
- expect(valueAI).toBe("TestSenderNameAI");
-
- expect(
- await page
- .getByText(
- "testtesttesttesttesttestte;.;.,;,.;,.;.,;,..,;;;;;;;;;;;;;;;;;;;;;,;.;,.;,.,;.,;.;.,~~çççççççççççççççççççççççççççççççççççççççisdajfdasiopjfaodisjhvoicxjiovjcxizopjviopasjioasfhjaiohf23432432432423423sttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttestççççççççççççççççççççççççççççççççç,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,!",
- { exact: true },
- )
- .isVisible(),
- );
-});
-
test("chat_io_teste", async ({ page }) => {
await page.goto("/");
await page.locator("span").filter({ hasText: "My Collection" }).isVisible();
- await page.waitForTimeout(3000);
+ await page.waitForTimeout(2000);
let modalCount = 0;
try {
diff --git a/src/frontend/tests/end-to-end/chatInputOutputUser.spec.ts b/src/frontend/tests/end-to-end/chatInputOutputUser.spec.ts
new file mode 100644
index 000000000..9399485d3
--- /dev/null
+++ b/src/frontend/tests/end-to-end/chatInputOutputUser.spec.ts
@@ -0,0 +1,92 @@
+import { expect, test } from "@playwright/test";
+
+test("user must interact with chat with Input/Output", async ({ page }) => {
+ if (!process.env.CI) {
+ dotenv.config();
+ dotenv.config({ path: path.resolve(__dirname, "../../.env") });
+ }
+
+ await page.goto("/");
+
+ await page.waitForTimeout(1000);
+
+ let modalCount = 0;
+ try {
+ const modalTitleElement = await page?.getByTestId("modal-title");
+ if (modalTitleElement) {
+ modalCount = await modalTitleElement.count();
+ }
+ } catch (error) {
+ modalCount = 0;
+ }
+
+ while (modalCount === 0) {
+ await page.getByText("New Project", { exact: true }).click();
+ await page.waitForTimeout(5000);
+ modalCount = await page.getByTestId("modal-title")?.count();
+ }
+
+ await page.getByRole("heading", { name: "Basic Prompting" }).click();
+ await page.waitForTimeout(1000);
+
+ await page.getByTitle("fit view").click();
+ await page.getByTitle("zoom out").click();
+ await page.getByTitle("zoom out").click();
+ await page.getByTitle("zoom out").click();
+
+ if (!process.env.OPENAI_API_KEY) {
+ //You must set the OPENAI_API_KEY on .env file to run this test
+ expect(false).toBe(true);
+ }
+
+ await page
+ .getByTestId("popover-anchor-input-openai_api_key")
+ .fill(process.env.OPENAI_API_KEY ?? "");
+ await page.getByText("Playground", { exact: true }).click();
+ await page.getByPlaceholder("Send a message...").fill("Hello, how are you?");
+ await page.getByTestId("icon-LucideSend").click();
+ let valueUser = await page.getByTestId("sender_name_user").textContent();
+ let valueAI = await page.getByTestId("sender_name_ai").textContent();
+
+ expect(valueUser).toBe("User");
+ expect(valueAI).toBe("AI");
+
+ await page.keyboard.press("Escape");
+
+ await page
+ .getByTestId("textarea-input_value")
+ .nth(1)
+ .fill(
+ "testtesttesttesttesttestte;.;.,;,.;,.;.,;,..,;;;;;;;;;;;;;;;;;;;;;,;.;,.;,.,;.,;.;.,~~çççççççççççççççççççççççççççççççççççççççisdajfdasiopjfaodisjhvoicxjiovjcxizopjviopasjioasfhjaiohf23432432432423423sttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttestççççççççççççççççççççççççççççççççç,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,!",
+ );
+ await page
+ .getByTestId("popover-anchor-input-sender_name")
+ .nth(1)
+ .fill("TestSenderNameUser");
+ await page
+ .getByTestId("popover-anchor-input-sender_name")
+ .nth(0)
+ .fill("TestSenderNameAI");
+
+ await page.getByText("Playground", { exact: true }).click();
+ await page.getByTestId("icon-LucideSend").click();
+
+ valueUser = await page
+ .getByTestId("sender_name_testsendernameuser")
+ .textContent();
+ valueAI = await page
+ .getByTestId("sender_name_testsendernameai")
+ .textContent();
+
+ expect(valueUser).toBe("TestSenderNameUser");
+ expect(valueAI).toBe("TestSenderNameAI");
+
+ expect(
+ await page
+ .getByText(
+ "testtesttesttesttesttestte;.;.,;,.;,.;.,;,..,;;;;;;;;;;;;;;;;;;;;;,;.;,.;,.,;.,;.;.,~~çççççççççççççççççççççççççççççççççççççççisdajfdasiopjfaodisjhvoicxjiovjcxizopjviopasjioasfhjaiohf23432432432423423sttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttesttestççççççççççççççççççççççççççççççççç,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,!",
+ { exact: true },
+ )
+ .isVisible(),
+ );
+});
diff --git a/src/frontend/tests/end-to-end/globalVariables.spec.ts b/src/frontend/tests/end-to-end/globalVariables.spec.ts
index a92438b87..920ba743b 100644
--- a/src/frontend/tests/end-to-end/globalVariables.spec.ts
+++ b/src/frontend/tests/end-to-end/globalVariables.spec.ts
@@ -43,7 +43,7 @@ test("GlobalVariables", async ({ page }) => {
const genericName = Math.random().toString();
const credentialName = Math.random().toString();
- await page.getByTestId("icon-Globe").nth(1).click();
+ await page.getByTestId("icon-Globe").nth(0).click();
await page.getByText("Add New Variable", { exact: true }).click();
await page
.getByPlaceholder("Insert a name for the variable...")
@@ -69,6 +69,9 @@ test("GlobalVariables", async ({ page }) => {
await page.getByText("Save Variable", { exact: true }).click();
expect(page.getByText(credentialName, { exact: true })).not.toBeNull();
await page.getByText(credentialName, { exact: true }).isVisible();
+ await page.getByText("Save Variable", { exact: true }).click();
+ await page.waitForTimeout(2000);
+
await page
.getByText(credentialName, { exact: true })
.hover()
diff --git a/src/frontend/tests/end-to-end/inputListComponent.spec.ts b/src/frontend/tests/end-to-end/inputListComponent.spec.ts
index 0913c98a1..ab687e7cb 100644
--- a/src/frontend/tests/end-to-end/inputListComponent.spec.ts
+++ b/src/frontend/tests/end-to-end/inputListComponent.spec.ts
@@ -28,7 +28,7 @@ test("InputListComponent", async ({ page }) => {
await page.waitForTimeout(1000);
await page
- .getByTestId("vectorsearchAstra DB Search")
+ .getByTestId("vectorstoresAstra DB")
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
diff --git a/test-results/.last-run.json b/test-results/.last-run.json
deleted file mode 100644
index 544c11fbc..000000000
--- a/test-results/.last-run.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "status": "failed",
- "failedTests": []
-}
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 60595f48a..75614054f 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -2,7 +2,6 @@ from pathlib import Path
from tempfile import tempdir
import pytest
-
from langflow.__main__ import app
from langflow.services import deps
@@ -29,7 +28,7 @@ def test_components_path(runner, client, default_settings):
)
assert result.exit_code == 0, result.stdout
settings_service = deps.get_settings_service()
- assert str(temp_dir) in settings_service.settings.COMPONENTS_PATH
+ assert str(temp_dir) in settings_service.settings.components_path
def test_superuser(runner, client, session):
diff --git a/tests/test_creators.py b/tests/test_creators.py
deleted file mode 100644
index 177dd4105..000000000
--- a/tests/test_creators.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from typing import Dict, List
-
-import pytest
-from langflow.interface.agents.base import AgentCreator
-from langflow.interface.base import LangChainTypeCreator
-
-
-@pytest.fixture
-def sample_lang_chain_type_creator() -> LangChainTypeCreator:
- class SampleLangChainTypeCreator(LangChainTypeCreator):
- type_name: str = "test_type"
-
- def type_to_loader_dict(self) -> Dict: # type: ignore
- return {"test_type": "TestClass"}
-
- def to_list(self) -> List[str]:
- return ["node1", "node2"]
-
- def get_signature(self, name: str) -> Dict:
- return {
- "template": {"test_field": {"type": "str"}},
- "description": "test description",
- "base_classes": ["base_class1", "base_class2"],
- }
-
- return SampleLangChainTypeCreator()
-
-
-@pytest.fixture
-def sample_agent_creator() -> AgentCreator:
- return AgentCreator()
-
-
-def test_lang_chain_type_creator_to_dict(
- client,
- sample_lang_chain_type_creator: LangChainTypeCreator,
-):
- type_dict = sample_lang_chain_type_creator.to_dict()
-
- assert len(type_dict) == 1
- assert "test_type" in type_dict
- assert "node1" in type_dict["test_type"]
- assert "node2" in type_dict["test_type"]
- assert "template" in type_dict["test_type"]["node1"]
- assert "description" in type_dict["test_type"]["node1"]
- assert "base_classes" in type_dict["test_type"]["node1"]
-
-
-def test_agent_creator_type_to_loader_dict(sample_agent_creator: AgentCreator):
- type_to_loader_dict = sample_agent_creator.type_to_loader_dict
- assert len(type_to_loader_dict) > 0
- assert "JsonAgent"
diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py
index 07796c540..0b64f2c1a 100644
--- a/tests/test_custom_component.py
+++ b/tests/test_custom_component.py
@@ -5,14 +5,14 @@ from uuid import uuid4
import pytest
from langchain_core.documents import Document
-from langflow.interface.custom.base import CustomComponent
-from langflow.interface.custom.code_parser.code_parser import CodeParser, CodeSyntaxError
-from langflow.interface.custom.custom_component.component import Component, ComponentCodeNullError
+from langflow.custom import CustomComponent
+from langflow.custom.code_parser.code_parser import CodeParser, CodeSyntaxError
+from langflow.custom.custom_component.component import Component, ComponentCodeNullError
from langflow.services.database.models.flow import Flow, FlowCreate
code_default = """
from langflow.field_typing import Prompt
-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent
from langflow.field_typing import BaseLanguageModel
from langchain.chains import LLMChain
diff --git a/tests/test_custom_types.py b/tests/test_custom_types.py
deleted file mode 100644
index 33c854ba3..000000000
--- a/tests/test_custom_types.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Test this:
-import pytest
-from langflow.interface.custom.utils import get_function
-from langflow.interface.tools.custom import PythonFunction, PythonFunctionTool
-from langflow.utils import constants
-
-
-def test_python_function_tool():
- """Test Python function"""
- code = constants.DEFAULT_PYTHON_FUNCTION
- func = get_function(code)
- func = PythonFunctionTool(name="Test", description="Testing", code=code, func=func)
- assert func("text") == "text"
- # the tool decorator should raise an error if
- # the function is not str -> str
-
- # This raises ValidationError
- with pytest.raises(SyntaxError):
- code = pytest.CODE_WITH_SYNTAX_ERROR
- func = get_function(code)
- func = PythonFunctionTool(name="Test", description="Testing", code=code, func=func)
-
-
-def test_python_function():
- """Test Python function"""
- func = PythonFunction(code=constants.DEFAULT_PYTHON_FUNCTION)
- assert get_function(func.code)("text") == "text"
- # the tool decorator should raise an error if
- # the function is not str -> str
-
- # This raises ValidationError
- with pytest.raises(SyntaxError):
- func = PythonFunction(code=pytest.CODE_WITH_SYNTAX_ERROR)
diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py
index 893b2b6d3..e45b0898e 100644
--- a/tests/test_endpoints.py
+++ b/tests/test_endpoints.py
@@ -5,9 +5,8 @@ import pytest
from fastapi import status
from fastapi.testclient import TestClient
-from langflow.interface.custom.directory_reader.directory_reader import DirectoryReader
+from langflow.custom.directory_reader.directory_reader import DirectoryReader
from langflow.services.deps import get_settings_service
-from langflow.template.frontend_node.chains import TimeTravelGuideChainNode
def run_post(client, flow_id, headers, post_data):
@@ -265,13 +264,13 @@ def test_get_all(client: TestClient, logged_in_headers):
response = client.get("api/v1/all", headers=logged_in_headers)
assert response.status_code == 200
settings = get_settings_service().settings
- dir_reader = DirectoryReader(settings.COMPONENTS_PATH[0])
+ dir_reader = DirectoryReader(settings.components_path[0])
files = dir_reader.get_files()
# json_response is a dict of dicts
all_names = [component_name for _, components in response.json().items() for component_name in components]
json_response = response.json()
# We need to test the custom nodes
- assert len(all_names) > len(files)
+ assert len(all_names) == len(files)
assert "ChatInput" in json_response["inputs"]
assert "Prompt" in json_response["inputs"]
assert "ChatOutput" in json_response["outputs"]
@@ -385,7 +384,6 @@ def test_invalid_prompt(client: TestClient):
],
)
def test_various_prompts(client, prompt, expected_input_variables):
- TimeTravelGuideChainNode().to_dict()
PROMPT_REQUEST["template"] = prompt
response = client.post("api/v1/validate/prompt", json=PROMPT_REQUEST)
assert response.status_code == 200
diff --git a/tests/test_helper_components.py b/tests/test_helper_components.py
index 8414cdac5..01a69de74 100644
--- a/tests/test_helper_components.py
+++ b/tests/test_helper_components.py
@@ -1,6 +1,7 @@
from langchain_core.documents import Document
+
from langflow.components import helpers
-from langflow.interface.custom.utils import build_custom_component_template
+from langflow.custom.utils import build_custom_component_template
from langflow.schema import Record
diff --git a/tests/test_loading.py b/tests/test_loading.py
index bea558f30..5872d060f 100644
--- a/tests/test_loading.py
+++ b/tests/test_loading.py
@@ -1,9 +1,8 @@
import pytest
-
from langflow.graph import Graph
from langflow.graph.schema import RunOutputs
from langflow.initial_setup.setup import load_starter_projects
-from langflow.processing.load import load_flow_from_json, run_flow_from_json
+from langflow.load import load_flow_from_json, run_flow_from_json
@pytest.mark.noclient
@@ -38,6 +37,6 @@ def test_run_flow_from_json_object():
"""Test loading a flow from a json file and applying tweaks"""
_, projects = zip(*load_starter_projects())
project = [project for project in projects if "Basic Prompting" in project["name"]][0]
- results = run_flow_from_json(project, input_value="test")
+ results = run_flow_from_json(project, input_value="test", fallback_to_env_vars=True)
assert results is not None
assert all(isinstance(result, RunOutputs) for result in results)