diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index 7cae75457..000000000 --- a/.dockerignore +++ /dev/null @@ -1,7 +0,0 @@ -.venv/ -**/aws -node_modules -**/node_modules/ -dist/ -**/build/ -src/backend/langflow/frontend \ No newline at end of file diff --git a/.github/actions/poetry_caching/action.yml b/.github/actions/poetry_caching/action.yml index fb76b1723..e185e7094 100644 --- a/.github/actions/poetry_caching/action.yml +++ b/.github/actions/poetry_caching/action.yml @@ -77,7 +77,12 @@ runs: POETRY_VERSION: ${{ inputs.poetry-version }} PYTHON_VERSION: ${{ inputs.python-version }} # Install poetry using the python version installed by setup-python step. - run: pipx install "poetry==$POETRY_VERSION" --python '${{ steps.setup-python.outputs.python-path }}' --verbose + run: | + pipx install "poetry==$POETRY_VERSION" --python '${{ steps.setup-python.outputs.python-path }}' --verbose + pipx ensurepath + # Ensure the poetry binary is available in the PATH. + # Test that the poetry binary is available. 
+ poetry --version - name: Restore pip and poetry cached dependencies uses: actions/cache@v4 diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml new file mode 100644 index 000000000..ef3c8f698 --- /dev/null +++ b/.github/workflows/create-release.yml @@ -0,0 +1,64 @@ +name: Create Release +on: + workflow_dispatch: + inputs: + version: + description: "Version to release" + required: true + type: string + release_type: + description: "Type of release (base or main)" + required: true + type: choice + options: + - base + - main + +env: + POETRY_VERSION: "1.8.2" +jobs: + release: + name: Build Langflow + runs-on: ubuntu-latest + outputs: + version: ${{ steps.check-version.outputs.version }} + steps: + - uses: actions/checkout@v4 + - name: Install poetry + run: pipx install poetry==$POETRY_VERSION + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + cache: "poetry" + - name: Build project for distribution + run: | + if [ "${{ inputs.release_type }}" == "base" ]; then + make build base=true + else + make build main=true + fi + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: dist${{ inputs.release_type }} + path: ${{ inputs.release_type == 'base' && 'src/backend/base/dist' || 'dist' }} + create_release: + name: Create Release Job + runs-on: ubuntu-latest + needs: release + steps: + - uses: actions/download-artifact@v4 + with: + name: dist${{ inputs.release_type }} + path: dist + - name: Create Release Notes + uses: ncipollo/release-action@v1 + with: + artifacts: "dist/*" + token: ${{ secrets.GITHUB_TOKEN }} + draft: false + generateReleaseNotes: true + prerelease: true + tag: v${{ inputs.version }} + commit: dev diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 000000000..2c8e61547 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,63 @@ +name: Docker Build and Push +on: + workflow_call: + 
inputs: + version: + required: true + type: string + release_type: + required: true + type: string + workflow_dispatch: + inputs: + version: + required: true + type: string + release_type: + required: true + type: choice + options: + - base + - main + +jobs: + docker_build: + name: Build Docker Image + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set Dockerfile and Tags + id: set-vars + run: | + if [ "${{ inputs.release_type }}" == "base" ]; then + echo "DOCKERFILE=./docker/build_and_push_base.Dockerfile" >> $GITHUB_ENV + echo "TAGS=langflowai/langflow:base-${{ inputs.version }}" >> $GITHUB_ENV + else + echo "DOCKERFILE=./docker/build_and_push.Dockerfile" >> $GITHUB_ENV + echo "TAGS=langflowai/langflow:${{ inputs.version }},langflowai/langflow:1.0-alpha" >> $GITHUB_ENV + fi + - name: Build and push + uses: docker/build-push-action@v5 + with: + context: . + push: true + file: ${{ env.DOCKERFILE }} + tags: ${{ env.TAGS }} + - name: Wait for Docker Hub to propagate + run: sleep 120 + - name: Restart HuggingFace Spaces Build + # There's a script in ./scripts/factory_reset_space.py that will reset the build + # using the HUGGINGFACE_API_TOKEN secret + run: | + python ./scripts/factory_restart_space.py + env: + HUGGINGFACE_API_TOKEN: ${{ secrets.HUGGINGFACE_API_TOKEN }} diff --git a/.github/workflows/pre-release-base.yml b/.github/workflows/pre-release-base.yml index cbcfe3ad6..d087fc183 100644 --- a/.github/workflows/pre-release-base.yml +++ b/.github/workflows/pre-release-base.yml @@ -72,6 +72,6 @@ jobs: with: context: . 
push: true - file: ./build_and_push_base.Dockerfile + file: ./docker/build_and_push_base.Dockerfile tags: | langflowai/langflow:base-${{ needs.release.outputs.version }} diff --git a/.github/workflows/pre-release-langflow.yml b/.github/workflows/pre-release-langflow.yml index 5dcd69617..82cb580f3 100644 --- a/.github/workflows/pre-release-langflow.yml +++ b/.github/workflows/pre-release-langflow.yml @@ -78,7 +78,7 @@ jobs: with: context: . push: true - file: ./build_and_push.Dockerfile + file: ./docker/build_and_push.Dockerfile tags: | langflowai/langflow:${{ needs.release.outputs.version }} langflowai/langflow:1.0-alpha diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml new file mode 100644 index 000000000..b72def8b3 --- /dev/null +++ b/.github/workflows/pre-release.yml @@ -0,0 +1,104 @@ +name: Langflow Pre-release (Unified) +run-name: Langflow (${{inputs.release_type}}) Pre-release by @${{ github.actor }} +on: + workflow_dispatch: + inputs: + release_package: + description: "Release package" + required: true + type: boolean + default: false + release_type: + description: "Type of release (base or main)" + required: true + type: choice + options: + - base + - main + +env: + POETRY_VERSION: "1.8.2" + +jobs: + release: + name: Release Langflow + if: inputs.release_package == true + runs-on: ubuntu-latest + outputs: + version: ${{ steps.check-version.outputs.version }} + steps: + - uses: actions/checkout@v4 + - name: Install poetry + run: pipx install poetry==$POETRY_VERSION + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: "3.10" + cache: "poetry" + - name: Check Version + id: check-version + run: | + if [ "${{ inputs.release_type }}" == "base" ]; then + version=$(cd src/backend/base && poetry version --short) + last_released_version=$(curl -s "https://pypi.org/pypi/langflow-base/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1) + else + version=$(poetry version --short) + 
last_released_version=$(curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1) + fi + if [ "$version" = "$last_released_version" ]; then + echo "Version $version is already released. Skipping release." + exit 1 + else + echo version=$version >> $GITHUB_OUTPUT + fi + - name: Build project for distribution + run: | + if [ "${{ inputs.release_type }}" == "base" ]; then + make build base=true + else + make build main=true + fi + - name: Publish to PyPI + env: + POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} + run: | + if [ "${{ inputs.release_type }}" == "base" ]; then + make publish base=true + else + make publish main=true + fi + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: dist${{ inputs.release_type }} + path: ${{ inputs.release_type == 'base' && 'src/backend/base/dist' || 'dist' }} + + call_docker_build: + name: Call Docker Build Workflow + needs: release + uses: langflow-ai/langflow/.github/workflows/docker-build.yml@dev + with: + version: ${{ needs.release.outputs.version }} + release_type: ${{ inputs.release_type }} + secrets: inherit + + create_release: + name: Create Release + runs-on: ubuntu-latest + needs: [release] + if: ${{ inputs.release_type == 'main' }} + steps: + - uses: actions/download-artifact@v4 + with: + name: dist${{ inputs.release_type }} + path: dist + - name: Create Release + uses: ncipollo/release-action@v1 + with: + artifacts: "dist/*" + token: ${{ secrets.GITHUB_TOKEN }} + draft: false + generateReleaseNotes: true + prerelease: true + tag: v${{ needs.release.outputs.version }} + commit: dev diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c19a491c3..06df72e9f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -50,7 +50,7 @@ jobs: with: context: . 
push: true - file: ./build_and_push.Dockerfile + file: ./docker/build_and_push.Dockerfile tags: | langflowai/langflow:${{ steps.check-version.outputs.version }} langflowai/langflow:latest diff --git a/.github/workflows/typescript_test.yml b/.github/workflows/typescript_test.yml index dc5bc061e..be081bec6 100644 --- a/.github/workflows/typescript_test.yml +++ b/.github/workflows/typescript_test.yml @@ -19,8 +19,8 @@ jobs: strategy: fail-fast: false matrix: - shardIndex: [1] - shardTotal: [1] + shardIndex: [1, 2, 3, 4] + shardTotal: [4] steps: - name: Checkout code uses: actions/checkout@v4 @@ -30,7 +30,15 @@ jobs: id: setup-node with: node-version: ${{ env.NODE_VERSION }} - cache: "npm" + + - name: Cache Node.js dependencies + uses: actions/cache@v4 + id: npm-cache + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('src/frontend/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- - name: Install Node.js dependencies run: | @@ -80,7 +88,7 @@ jobs: - name: Run Playwright Tests run: | cd src/frontend - npx playwright test + npx playwright test --shard ${{ matrix.shardIndex }}/${{ matrix.shardTotal }} --workers 2 - name: Upload blob report to GitHub Actions Artifacts if: always() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 60505515a..d03df05b9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,17 +18,13 @@ repos: hooks: - id: check-case-conflict - id: end-of-file-fixer + # python, js and ts only + files: \.(py|js|ts)$ - id: mixed-line-ending + files: \.(py|js|ts)$ args: - --fix=lf - id: trailing-whitespace - - id: pretty-format-json - exclude: ^tsconfig.*.json - args: - - --autofix - - --indent=4 - - --no-sort-keys - - id: check-merge-conflict - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
rev: v0.4.2 diff --git a/.readthedocs.yaml b/.readthedocs.yaml deleted file mode 100644 index 82dfe1f85..000000000 --- a/.readthedocs.yaml +++ /dev/null @@ -1,31 +0,0 @@ -# Read the Docs configuration file for Sphinx projects -# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - -# Required -version: 2 - -# Set the OS, Python version and other tools you might need -build: - os: ubuntu-22.04 - tools: - python: "3.11" - # You can also specify other tool versions: - # nodejs: "19" - # rust: "1.64" - # golang: "1.19" - -# Build documentation in the "docs/" directory with Sphinx -sphinx: - configuration: docs/conf.py - -# Optionally build your docs in additional formats such as PDF and ePub -# formats: -# - pdf -# - epub - -# Optional but recommended, declare the Python requirements required -# to build your documentation -# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html -# python: -# install: -# - requirements: docs/requirements.txt \ No newline at end of file diff --git a/README.md b/README.md index a753ea830..d4776daeb 100644 --- a/README.md +++ b/README.md @@ -1,21 +1,49 @@ -# [![Langflow](https://github.com/langflow-ai/langflow/blob/dev/docs/static/img/hero.png)](https://www.langflow.org) +# [![Langflow](./docs/static/img/hero.png)](https://www.langflow.org) -### [Langflow](https://www.langflow.org) is a new, visual way to build, iterate and deploy AI apps. +

+ A visual framework for building multi-agent and RAG applications +

+

+ Open-source, Python-powered, fully customizable, LLM and vector store agnostic +

-# ⚡️ Documentation and Community +

+ Docs - + Join our Discord - + Follow us on X - + Live demo +

-- [Documentation](https://docs.langflow.org) -- [Discord](https://discord.com/invite/EqksyE2EX9) +

+ + + + + + +

-# 📦 Installation +

+ Your GIF +

+ +# 📝 Content + +- [Get Started](#-get-started) +- [Create Flows](#-create-flows) +- [Deploy](#deploy) +- [Command Line Interface (CLI)](#️-command-line-interface-cli) +- [Contribute](#-contribute) + +# 📦 Get Started You can install Langflow with pip: ```shell -# Make sure you have Python 3.10 installed on your system. -# Install the pre-release version +# Make sure you have >=Python 3.10 installed on your system. +# Install the pre-release version (recommended for the latest updates) python -m pip install langflow --pre --force-reinstall # or stable version @@ -28,9 +56,9 @@ Then, run Langflow with: python -m langflow run ``` -You can also preview Langflow in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview). [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true), to create your own Langflow workspace in minutes. +You can also preview Langflow in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview). [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes. -# 🎨 Creating Flows +# 🎨 Create Flows Creating flows with Langflow is easy. Simply drag components from the sidebar onto the canvas and connect them to start building your application. @@ -46,6 +74,32 @@ from langflow.load import run_flow_from_json results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!") ``` +# Deploy + +## Deploy Langflow on Google Cloud Platform + +Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](https://github.com/langflow-ai/langflow/blob/dev/docs/docs/deployment/gcp-deployment.md) document. 
+ +Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project. + +[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md) + +## Deploy on Railway + +Use this template to deploy Langflow 1.0 Preview on Railway: + +[![Deploy 1.0 Preview on Railway](https://railway.app/button.svg)](https://railway.app/template/UsJ1uB?referralCode=MnPSdg) + +Or this one to deploy Langflow 0.6.x: + +[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/JMXEWp?referralCode=MnPSdg) + +## Deploy on Render + + +Deploy to Render + + # 🖥️ Command Line Interface (CLI) Langflow provides a command-line interface (CLI) for easy management and configuration. @@ -65,7 +119,6 @@ Each option is detailed below: - `--workers`: Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`. - `--timeout`: Sets the worker timeout in seconds. The default is `60`. - `--port`: Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`. -- `--config`: Defines the path to the configuration file. The default is `config.yaml`. - `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`. - `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`. - `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`. 
@@ -88,33 +141,7 @@ You can configure many of the CLI options using environment variables. These can A sample `.env` file named `.env.example` is included with the project. Copy this file to a new file named `.env` and replace the example values with your actual settings. If you're setting values in both your OS and the `.env` file, the `.env` settings will take precedence. -# Deployment - -## Deploy Langflow on Google Cloud Platform - -Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document. - -Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project. - -[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md) - -## Deploy on Railway - -Use this template to deploy Langflow 1.0 Preview on Railway: - -[![Deploy 1.0 Preview on Railway](https://railway.app/button.svg)](https://railway.app/template/UsJ1uB?referralCode=MnPSdg) - -Or this one to deploy Langflow 0.6.x: - -[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/JMXEWp?referralCode=MnPSdg) - -## Deploy on Render - - -Deploy to Render - - -# 👋 Contributing +# 👋 Contribute We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible. 
diff --git a/base.Dockerfile b/base.Dockerfile deleted file mode 100644 index 2325ece79..000000000 --- a/base.Dockerfile +++ /dev/null @@ -1,99 +0,0 @@ - - -# syntax=docker/dockerfile:1 -# Keep this syntax directive! It's used to enable Docker BuildKit - -# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865 -# but I try to keep it updated (see history) - -################################ -# PYTHON-BASE -# Sets up all our shared environment variables -################################ -FROM python:3.10-slim as python-base - -# python -ENV PYTHONUNBUFFERED=1 \ - # prevents python creating .pyc files - PYTHONDONTWRITEBYTECODE=1 \ - \ - # pip - PIP_DISABLE_PIP_VERSION_CHECK=on \ - PIP_DEFAULT_TIMEOUT=100 \ - \ - # poetry - # https://python-poetry.org/docs/configuration/#using-environment-variables - POETRY_VERSION=1.8.2 \ - # make poetry install to this location - POETRY_HOME="/opt/poetry" \ - # make poetry create the virtual environment in the project's root - # it gets named `.venv` - POETRY_VIRTUALENVS_IN_PROJECT=true \ - # do not ask any interactive question - POETRY_NO_INTERACTION=1 \ - \ - # paths - # this is where our requirements + virtual environment will live - PYSETUP_PATH="/opt/pysetup" \ - VENV_PATH="/opt/pysetup/.venv" - - -# prepend poetry and venv to path -ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" - - -################################ -# BUILDER-BASE -# Used to build deps + create our virtual environment -################################ -FROM python-base as builder-base -RUN apt-get update \ - && apt-get install --no-install-recommends -y \ - # deps for installing poetry - curl \ - # deps for building python deps - build-essential - - -# install poetry - respects $POETRY_VERSION & $POETRY_HOME -# The --mount will mount the buildx cache directory to where -# Poetry and Pip store their cache so that they can reuse it -RUN --mount=type=cache,target=/root/.cache \ - curl -sSL 
https://install.python-poetry.org | python3 - - -# copy project requirement files here to ensure they will be cached. -WORKDIR $PYSETUP_PATH -# Copy just one file to avoid rebuilding the whole image -COPY poetry.lock pyproject.toml ./ -COPY ./src/backend/langflow ./src/backend/langflow -COPY ./src/backend/base/pyproject.toml ./src/backend/base/pyproject.toml -# Copy README.md to the build context -COPY README.md . -# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally -RUN --mount=type=cache,target=/root/.cache \ - poetry install --without dev --extras deploy - - -################################ -# DEVELOPMENT -# Image used during development / testing -################################ -FROM python-base as development -WORKDIR $PYSETUP_PATH - -# copy in our built poetry + venv -COPY --from=builder-base $POETRY_HOME $POETRY_HOME -COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH - -# Copy just one file to avoid rebuilding the whole image -COPY ./src/backend/langflow ./src/backend/langflow -# quicker install as runtime deps are already installed -RUN --mount=type=cache,target=/root/.cache \ - poetry install --with=dev --extras deploy - -# copy in our app code -COPY ./src/backend ./src/backend -RUN --mount=type=cache,target=/root/.cache \ - poetry install --with=dev --extras deploy -COPY ./tests ./tests= - diff --git a/docker-compose.debug.yml b/docker-compose.debug.yml deleted file mode 100644 index f81faf8d4..000000000 --- a/docker-compose.debug.yml +++ /dev/null @@ -1,33 +0,0 @@ -version: "3.4" - -services: - backend: - volumes: - - ./:/app - build: - context: ./ - dockerfile: ./dev.Dockerfile - command: - [ - "sh", - "-c", - "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --loop asyncio", - ] - ports: - - 7860:7860 - - 5678:5678 - restart: on-failure - - frontend: - build: - context: ./src/frontend - dockerfile: 
./dev.Dockerfile - args: - - BACKEND_URL=http://backend:7860 - ports: - - "3000:3000" - volumes: - - ./src/frontend/public:/home/node/app/public - - ./src/frontend/src:/home/node/app/src - - ./src/frontend/package.json:/home/node/app/package.json - restart: on-failure diff --git a/docker/.dockerignore b/docker/.dockerignore new file mode 100644 index 000000000..737244fba --- /dev/null +++ b/docker/.dockerignore @@ -0,0 +1,9 @@ +.venv/ +**/aws +node_modules +**/node_modules/ +dist/ +**/build/ +src/backend/langflow/frontend +**/langflow-pre.db +**/langflow.db \ No newline at end of file diff --git a/build_and_push.Dockerfile b/docker/build_and_push.Dockerfile similarity index 89% rename from build_and_push.Dockerfile rename to docker/build_and_push.Dockerfile index 3e9e18dab..3a34db188 100644 --- a/build_and_push.Dockerfile +++ b/docker/build_and_push.Dockerfile @@ -72,15 +72,21 @@ COPY Makefile ./ COPY README.md ./ RUN --mount=type=cache,target=/root/.cache \ curl -sSL https://install.python-poetry.org | python3 - +RUN useradd -m -u 1000 user && \ + mkdir -p /app/langflow && \ + chown -R user:user /app && \ + chmod -R u+w /app/langflow + +# Update PATH with home/user/.local/bin +ENV PATH="/home/user/.local/bin:${PATH}" RUN python -m pip install requests && cd ./scripts && python update_dependencies.py RUN $POETRY_HOME/bin/poetry lock RUN $POETRY_HOME/bin/poetry build # Copy virtual environment and built .tar.gz from builder base -RUN useradd -m -u 1000 user USER user # Install the package from the .tar.gz -RUN python -m pip install /app/dist/*.tar.gz +RUN python -m pip install /app/dist/*.tar.gz --user ENTRYPOINT ["python", "-m", "langflow", "run"] CMD ["--host", "0.0.0.0", "--port", "7860"] diff --git a/build_and_push_base.Dockerfile b/docker/build_and_push_base.Dockerfile similarity index 90% rename from build_and_push_base.Dockerfile rename to docker/build_and_push_base.Dockerfile index 2c7cc2a07..f70a517da 100644 --- a/build_and_push_base.Dockerfile +++ 
b/docker/build_and_push_base.Dockerfile @@ -78,13 +78,20 @@ RUN cd src/frontend && npm run build COPY src/backend ./src/backend RUN cp -r src/frontend/build src/backend/base/langflow/frontend RUN rm -rf src/backend/base/dist +RUN useradd -m -u 1000 user && \ + mkdir -p /app/langflow && \ + chown -R user:user /app && \ + chmod -R u+w /app/langflow + +# Update PATH with home/user/.local/bin +ENV PATH="/home/user/.local/bin:${PATH}" RUN cd src/backend/base && $POETRY_HOME/bin/poetry build # Copy virtual environment and built .tar.gz from builder base -RUN useradd -m -u 1000 user + USER user # Install the package from the .tar.gz -RUN python -m pip install /app/src/backend/base/dist/*.tar.gz +RUN python -m pip install /app/src/backend/base/dist/*.tar.gz --user ENTRYPOINT ["python", "-m", "langflow", "run"] diff --git a/docker-compose.yml b/docker/cdk-docker-compose.yml similarity index 100% rename from docker-compose.yml rename to docker/cdk-docker-compose.yml diff --git a/cdk.Dockerfile b/docker/cdk.Dockerfile similarity index 100% rename from cdk.Dockerfile rename to docker/cdk.Dockerfile diff --git a/container-cmd-cdk.sh b/docker/container-cmd-cdk.sh similarity index 100% rename from container-cmd-cdk.sh rename to docker/container-cmd-cdk.sh diff --git a/dev.Dockerfile b/docker/dev.Dockerfile similarity index 100% rename from dev.Dockerfile rename to docker/dev.Dockerfile diff --git a/Dockerfile b/docker/render.Dockerfile similarity index 100% rename from Dockerfile rename to docker/render.Dockerfile diff --git a/docker_example/pre.docker-compose.yml b/docker_example/pre.docker-compose.yml index b2fa00903..3df573df5 100644 --- a/docker_example/pre.docker-compose.yml +++ b/docker_example/pre.docker-compose.yml @@ -10,9 +10,9 @@ services: environment: - LANGFLOW_DATABASE_URL=postgresql://langflow:langflow@postgres:5432/langflow # This variable defines where the logs, file storage, monitor data and secret keys are stored. 
- - LANGFLOW_CONFIG_DIR=/var/lib/langflow + - LANGFLOW_CONFIG_DIR=app/langflow volumes: - - langflow-data:/var/lib/langflow + - langflow-data:/app/langflow postgres: image: postgres:16 diff --git a/docs/docs/administration/api.mdx b/docs/docs/administration/api.mdx index 25dbeb31e..103c43f81 100644 --- a/docs/docs/administration/api.mdx +++ b/docs/docs/administration/api.mdx @@ -4,15 +4,15 @@ import Admonition from "@theme/Admonition"; # API Keys -## Introduction - -Langflow offers an API Key functionality that allows users to access their individual components and flows without going through traditional login authentication. The API Key is a user-specific token that can be included in the request's header or query parameter to authenticate API calls. The following documentation outlines how to generate, use, and manage these API Keys in Langflow. +Langflow provides an API key functionality that allows users to access their individual components and flows without traditional login authentication. The API key is a user-specific token that can be included in the request header or query parameter to authenticate API calls. This documentation outlines how to generate, use, and manage API keys in Langflow. - This feature requires the `LANGFLOW_AUTO_LOGIN` environment variable to be set - to `False`. The default user and password are set using _`LANGFLOW_SUPERUSER`_ - and _`LANGFLOW_SUPERUSER_PASSWORD`_ environment variables. Default values are - _`langflow`_ and _`langflow`_ respectively. + The default user and password are set using the LANGFLOW_SUPERUSER and + LANGFLOW_SUPERUSER_PASSWORD environment variables. + +The default values are +langflow and langflow, respectively. 
+ ## Generating an API Key @@ -93,7 +93,7 @@ print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key)) ### Using the Query Parameter -Alternatively, you can include the API key as a query parameter in the URL: +Include the API key as a query parameter in the URL: ```bash curl -X POST \ @@ -146,9 +146,9 @@ print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key)) ## Security Considerations -- **Visibility**: The API key won't be retrievable again through the UI for security reasons. -- **Scope**: The key only allows access to the flows and components of the specific user to whom it was issued. +- **Visibility**: For security reasons, the API key cannot be retrieved again through the UI. +- **Scope**: The key allows access only to the flows and components of the specific user to whom it was issued. ## Revoking an API Key -To revoke an API key, simply delete it from the UI. This will immediately invalidate the key and prevent it from being used again. +To revoke an API key, delete it from the UI. This action immediately invalidates the key and prevents it from being used again. diff --git a/docs/docs/administration/cli.mdx b/docs/docs/administration/cli.mdx index 4f7c92983..a2a41adcd 100644 --- a/docs/docs/administration/cli.mdx +++ b/docs/docs/administration/cli.mdx @@ -1,4 +1,4 @@ -# 🖥️ Command Line Interface (CLI) +# Command Line Interface (CLI) ## Overview @@ -7,19 +7,18 @@ Langflow's Command Line Interface (CLI) is a powerful tool that allows you to in Running the CLI without any arguments will display a list of available commands and options. ```bash -python -m langflow --help +python -m langflow run --help # or -python -m langflow +python -m langflow run ``` -Each option is detailed below: +Each option for `run` command are detailed below: - `--help`: Displays all available options. - `--host`: Defines the host to bind the server to. Can be set using the `LANGFLOW_HOST` environment variable. The default is `127.0.0.1`. 
- `--workers`: Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`. - `--timeout`: Sets the worker timeout in seconds. The default is `60`. - `--port`: Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`. -- `--config`: Defines the path to the configuration file. The default is `config.yaml`. - `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`. - `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`. - `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`. @@ -36,6 +35,41 @@ Each option is detailed below: These parameters are important for users who need to customize the behavior of Langflow, especially in development or specialized deployment scenarios. +### API Key Command + +The `api-key` command allows you to create an API key for accessing Langflow's API when `LANGFLOW_AUTO_LOGIN` is set to `True`. + +```bash +python -m langflow api-key --help + + Usage: langflow api-key [OPTIONS] + + Creates an API key for the default superuser if AUTO_LOGIN is enabled. + Args: log_level (str, optional): Logging level. Defaults to "error". + Returns: None + +╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ --log-level TEXT Logging level. [env var: LANGFLOW_LOG_LEVEL] [default: error] │ +│ --help Show this message and exit. 
│ +╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +``` + +Once you run the `api-key` command, it will create an API key for the default superuser if `LANGFLOW_AUTO_LOGIN` is set to `True`. + +```bash +python -m langflow api-key +╭─────────────────────────────────────────────────────────────────────╮ +│ API Key Created Successfully: │ +│ │ +│ sk-O0elzoWID1izAH8RUKrnnvyyMwIzHi2Wk-uXWoNJ2Ro │ +│ │ +│ This is the only time the API key will be displayed. │ +│ Make sure to store it in a secure location. │ +│ │ +│ The API key has been copied to your clipboard. Cmd + V to paste it. │ +╰─────────────────────────────────────────────────────────────────────╯ +``` + ### Environment Variables You can configure many of the CLI options using environment variables. These can be exported in your operating system or added to a `.env` file and loaded using the `--env-file` option. diff --git a/docs/docs/administration/custom-component.mdx b/docs/docs/administration/custom-component.mdx index 6decb3833..02a137d07 100644 --- a/docs/docs/administration/custom-component.mdx +++ b/docs/docs/administration/custom-component.mdx @@ -74,11 +74,6 @@ class DocumentProcessor(CustomComponent): - - Check out [FlowRunner Component](../examples/flow-runner) for a more complex - example. - - --- ## Rules @@ -391,13 +386,13 @@ The recommended way to load custom components is to set the _`LANGFLOW_COMPONENT ```bash export LANGFLOW_COMPONENTS_PATH='["/path/to/components"]' -langflow +langflow run ``` Alternatively, you can specify the path to your custom components using the _`--components-path`_ argument when running the Langflow CLI, as shown below: ```bash -langflow --components-path /path/to/components +langflow run --components-path /path/to/components ``` Langflow will attempt to load all of the components found in the specified directory. 
If a component fails to load due to errors in the component's code, Langflow will print an error message to the console but will continue loading the rest of the components. diff --git a/docs/docs/administration/global-env.mdx b/docs/docs/administration/global-env.mdx index fca7ec16a..c23ca8dd1 100644 --- a/docs/docs/administration/global-env.mdx +++ b/docs/docs/administration/global-env.mdx @@ -1,7 +1,8 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; import Admonition from "@theme/Admonition"; +import ReactPlayer from "react-player"; -# Global environment variables +# Global Environment Variables Langflow 1.0 alpha includes the option to add **Global Environment Variables** for your application. @@ -43,3 +44,11 @@ You now have a `openai_api_key` global environment variable for your Langflow pr 4. To view and manage your project's global environment variables, visit **Settings** > **Variables and Secrets**. For more on variables in HuggingFace Spaces, see [Managing Secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets). + +## Video + +
+ +
diff --git a/docs/docs/administration/langfuse_integration.mdx b/docs/docs/administration/langfuse_integration.mdx deleted file mode 100644 index 81f06e787..000000000 --- a/docs/docs/administration/langfuse_integration.mdx +++ /dev/null @@ -1,49 +0,0 @@ -# Integrating Langfuse with Langflow - -## Introduction - -Langfuse is an open-source tracing and analytics tool designed for LLM applications. Integrating Langfuse with Langflow provides detailed production traces and granular insights into quality, cost, and latency. This integration allows you to monitor and debug your Langflow's chat or APIs easily. - -## Step-by-Step Instructions - -### Step 1: Create a Langfuse account - -1. Go to [Langfuse](https://langfuse.com) and click on the "Sign In" button in the top right corner. -2. Click on the "Sign Up" button and create an account. -3. Once logged in, click on "Settings" and then on "Create new API keys." -4. Copy the Public key and the Secret Key and save them somewhere safe. - {/* Add these keys to your environment variables in the following step. */} - -### Step 2: Set up Langfuse in Langflow - -1. **Export the Environment Variables**: You'll need to export the environment variables `LANGFLOW_LANGFUSE_SECRET_KEY` and `LANGFLOW_LANGFUSE_PUBLIC_KEY` with the values obtained in Step 1. - - You can do this by executing the following commands in your terminal: - - ```bash - export LANGFLOW_LANGFUSE_SECRET_KEY= - export LANGFLOW_LANGFUSE_PUBLIC_KEY= - ``` - - Alternatively, you can run the Langflow CLI command: - - ```bash - LANGFLOW_LANGFUSE_SECRET_KEY= LANGFLOW_LANGFUSE_PUBLIC_KEY= langflow - ``` - - If you are self-hosting Langfuse, you can also set the environment variable `LANGFLOW_LANGFUSE_HOST` to point to your Langfuse instance. By default, Langfuse points to the cloud instance at `https://cloud.langfuse.com`. - -2. 
**Verify Integration**: Ensure that the environment variables are set correctly by checking their existence in your environment, for example by running: - - ```bash - echo $LANGFLOW_LANGFUSE_SECRET_KEY - echo $LANGFLOW_LANGFUSE_PUBLIC_KEY - ``` - -3. **Monitor Langflow**: Now, whenever you use Langflow's chat or API, you will be able to see the tracing of your conversations in Langfuse. - -That's it! You have successfully integrated Langfuse with Langflow, enhancing observability and debugging capabilities for your LLM application. - ---- - -Note: For more details or customized configurations, please refer to the official [Langfuse documentation](https://langfuse.com/docs/integrations/langchain). diff --git a/docs/docs/administration/login.mdx b/docs/docs/administration/login.mdx index 1d5a1d031..d5d7c1989 100644 --- a/docs/docs/administration/login.mdx +++ b/docs/docs/administration/login.mdx @@ -4,7 +4,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; import Admonition from "@theme/Admonition"; -# Sign up and Sign in +# Sign Up and Sign In ## Introduction diff --git a/docs/docs/administration/playground.mdx b/docs/docs/administration/playground.mdx index 57f3f2de0..08166729e 100644 --- a/docs/docs/administration/playground.mdx +++ b/docs/docs/administration/playground.mdx @@ -14,7 +14,7 @@ It even works for flows hosted on the Langflow store! As long as you have a flow's environment variables set, you can run it by clicking the **Playground** button. -1. From your **Collections** page, click **Playground** in one of your flows. +1. From your **Collections** page, click the **![Playground icon](/logos/botmessage.svg)Playground** in one of your flows. The **Playground** window opens. 2. Chat with your bot as you normally would, all without having to open the editor. + +## Video + +
+ +
diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx index 828677310..1c039bd2a 100644 --- a/docs/docs/components/custom.mdx +++ b/docs/docs/components/custom.mdx @@ -3,7 +3,8 @@ import Admonition from "@theme/Admonition"; # Custom Components - Read the [Custom Component Guidelines](../administration/custom-component) for detailed information on custom components. + Read the [Custom Component Guidelines](../administration/custom-component) for + detailed information on custom components. Custom components let you extend Langflow by creating reusable and configurable components from a Python script. @@ -31,57 +32,60 @@ This class is the foundation for creating custom components. It allows users to The following types are supported in the build method: -| Supported Types | -| --------------------------------------------------------- | -| _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ | -| _`langflow.field_typing.NestedDict`_ | -| _`langflow.field_typing.Prompt`_ | -| _`langchain.chains.base.Chain`_ | -| _`langchain.PromptTemplate`_ | +| Supported Types | +| ----------------------------------------------------------------- | +| _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ | +| _`langflow.field_typing.NestedDict`_ | +| _`langflow.field_typing.Prompt`_ | +| _`langchain.chains.base.Chain`_ | +| _`langchain.PromptTemplate`_ | | _`from langchain.schema.language_model import BaseLanguageModel`_ | -| _`langchain.Tool`_ | -| _`langchain.document_loaders.base.BaseLoader`_ | -| _`langchain.schema.Document`_ | -| _`langchain.text_splitters.TextSplitter`_ | -| _`langchain.vectorstores.base.VectorStore`_ | -| _`langchain.embeddings.base.Embeddings`_ | -| _`langchain.schema.BaseRetriever`_ | +| _`langchain.Tool`_ | +| _`langchain.document_loaders.base.BaseLoader`_ | +| _`langchain.schema.Document`_ | +| _`langchain.text_splitters.TextSplitter`_ | +| _`langchain.vectorstores.base.VectorStore`_ | +| 
_`langchain.embeddings.base.Embeddings`_ | +| _`langchain.schema.BaseRetriever`_ | The difference between _`dict`_ and _`langflow.field_typing.NestedDict`_ is that one adds a simple key-value pair field, while the other opens a more robust dictionary editor. - Use the `Prompt` type by adding **kwargs to the build method. - If you want to add the values of the variables to the template you defined, format the `PromptTemplate` inside the `CustomComponent` class. + Use the `Prompt` type by adding **kwargs to the build method. If you want to + add the values of the variables to the template you defined, format the + `PromptTemplate` inside the `CustomComponent` class. - Use base Python types without a handle by default. To add handles, use the `input_types` key in the `build_config` method. + Use base Python types without a handle by default. To add handles, use the + `input_types` key in the `build_config` method. **build_config:** Defines the configuration fields of the component. This method returns a dictionary where each key represents a field name and each value defines the field's behavior. Supported keys for configuring fields: -| Key | Description | -| --------------------- | --------------------------------------------------- | -| `is_list` | Boolean indicating if the field can hold multiple values. | -| `options` | Dropdown menu options. | -| `multiline` | Boolean indicating if a field allows multiline input. | -| `input_types` | Allows connection handles for string fields. | -| `display_name` | Field name displayed in the UI. | -| `advanced` | Hides the field in the default UI view. | -| `password` | Masks input, useful for sensitive data. | -| `required` | Overrides the default behavior to make a field mandatory. | -| `info` | Tooltip for the field. | -| `file_types` | Accepted file types, useful for file fields. | -| `range_spec` | Defines valid ranges for float fields. | -| `title_case` | Boolean that controls field name capitalization. 
| -| `refresh_button` | Adds a refresh button that updates field values. | -| `real_time_refresh` | Updates the configuration as field values change. | -| `field_type` | Automatically set based on the build method's type hint. | +| Key | Description | +| ------------------- | --------------------------------------------------------- | +| `is_list` | Boolean indicating if the field can hold multiple values. | +| `options` | Dropdown menu options. | +| `multiline` | Boolean indicating if a field allows multiline input. | +| `input_types` | Allows connection handles for string fields. | +| `display_name` | Field name displayed in the UI. | +| `advanced` | Hides the field in the default UI view. | +| `password` | Masks input, useful for sensitive data. | +| `required` | Overrides the default behavior to make a field mandatory. | +| `info` | Tooltip for the field. | +| `file_types` | Accepted file types, useful for file fields. | +| `range_spec` | Defines valid ranges for float fields. | +| `title_case` | Boolean that controls field name capitalization. | +| `refresh_button` | Adds a refresh button that updates field values. | +| `real_time_refresh` | Updates the configuration as field values change. | +| `field_type` | Automatically set based on the build method's type hint. | - Use the `update_build_config` method to dynamically update configurations based on field values. + Use the `update_build_config` method to dynamically update configurations + based on field values. ## Additional methods and attributes @@ -99,8 +103,3 @@ The `CustomComponent` class also provides helpful methods for specific tasks (e. - `status`: Shows values from the `build` method, useful for debugging. - `field_order`: Controls the display order of fields. - `icon`: Sets the canvas display icon. - - - Check out the [FlowRunner](../examples/flow-runner) example to understand how to call a flow from a custom component. 
- - diff --git a/docs/docs/contributing/contribute-component.md b/docs/docs/contributing/contribute-component.md index b178a5882..f638434e2 100644 --- a/docs/docs/contributing/contribute-component.md +++ b/docs/docs/contributing/contribute-component.md @@ -1,6 +1,6 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; -# How to contribute components? +# How to Contribute Components? As of Langflow 1.0 alpha, new components are added as objects of the [CustomComponent](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/interface/custom/custom_component/custom_component.py) class and any dependencies are added to the [pyproject.toml](https://github.com/langflow-ai/langflow/blob/dev/pyproject.toml#L27) file. diff --git a/docs/docs/examples/buffer-memory.mdx b/docs/docs/examples/buffer-memory.mdx deleted file mode 100644 index b196f9031..000000000 --- a/docs/docs/examples/buffer-memory.mdx +++ /dev/null @@ -1,35 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# Buffer Memory - -For certain applications, retaining past interactions is crucial. For that, chains and agents may accept a memory component as one of their input parameters. The `ConversationBufferMemory` component is one of them. It stores messages and extracts them into variables. 
- -## ⛓️ Langflow Example - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - - - -#### Download Flow - - - -- [`ConversationBufferMemory`](https://python.langchain.com/docs/modules/memory/types/buffer) -- [`ConversationChain`](https://python.langchain.com/docs/modules/chains/) -- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) - - diff --git a/docs/docs/examples/chat-memory.mdx b/docs/docs/examples/chat-memory.mdx new file mode 100644 index 000000000..88dbbca2b --- /dev/null +++ b/docs/docs/examples/chat-memory.mdx @@ -0,0 +1,17 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Chat Memory + +The **Chat Memory** component restores previous messages given a Session ID, which can be any string. + +This component is available under the **Helpers** tab of the Langflow preview. + +
+ +
diff --git a/docs/docs/examples/combine-text.mdx b/docs/docs/examples/combine-text.mdx new file mode 100644 index 000000000..5a4e86cf0 --- /dev/null +++ b/docs/docs/examples/combine-text.mdx @@ -0,0 +1,21 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Combine Text + +With LLM pipelines, combining text from different sources may be as important as splitting text. + +The **Combine Text** component concatenates two text inputs into a single chunk using a specified delimiter, such as whitespace or a newline. + +Also, check out **Combine Texts (Unsorted)** as a similar alternative. + +This component is available under the **Helpers** tab of the Langflow preview. + +
+ +
diff --git a/docs/docs/examples/conversation-chain.mdx b/docs/docs/examples/conversation-chain.mdx deleted file mode 100644 index 294d1b440..000000000 --- a/docs/docs/examples/conversation-chain.mdx +++ /dev/null @@ -1,41 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# Conversation Chain - -This example shows how to instantiate a simple `ConversationChain` component using a Language Model (LLM). Once the Node Status turns green 🟢, the chat will be ready to take in user messages. Here, we used `ChatOpenAI` to act as the required LLM input, but you can use any LLM for this purpose. - - - -Make sure to always get the API key from the provider. - - - -## ⛓️ Langflow Example - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - - - -#### Download Flow - - - -- [`ConversationChain`](https://python.langchain.com/docs/modules/chains/) -- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) - - diff --git a/docs/docs/examples/create-record.mdx b/docs/docs/examples/create-record.mdx new file mode 100644 index 000000000..aa7a886f4 --- /dev/null +++ b/docs/docs/examples/create-record.mdx @@ -0,0 +1,17 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Create Record + +In Langflow, a `Record` has a structure very similar to a Python dictionary. It is a key-value pair data structure. + +The **Create Record** component allows you to dynamically create a `Record` from a specified number of inputs. You can add as many key-value pairs as you want (as long as it is less than 15 😅). 
Once you've chosen the number of `Records`, add keys and fill up values, or pass on values from other components to the component using the input handles. + +
+ +
diff --git a/docs/docs/examples/csv-loader.mdx b/docs/docs/examples/csv-loader.mdx deleted file mode 100644 index 25f3bb444..000000000 --- a/docs/docs/examples/csv-loader.mdx +++ /dev/null @@ -1,57 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# CSV Loader - -The `VectoStoreAgent` component retrieves information from one or more vector stores. This example shows a `VectoStoreAgent` connected to a CSV file through the `Chroma` vector store. Process description: - -- The `CSVLoader` loads a CSV file into a list of documents. -- The extracted data is then processed by the `CharacterTextSplitter`, which splits the text into small, meaningful chunks (usually sentences). -- These chunks feed the `Chroma` vector store, which converts them into vectors and stores them for fast indexing. -- Finally, the agent accesses the information of the vector store through the `VectorStoreInfo` tool. - - - The vector store is used for efficient semantic search, while - `VectorStoreInfo` carries information about it, such as its name and - description. Embeddings are a way to represent words, phrases, or any entities - in a vector space. Learn more about them - [here](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). - - - - Once you build this flow, ask questions about the data in the chat interface - (e.g., number of rows or columns). 
- - -## ⛓️ Langflow Example - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - - - -#### Download Flow - - - -- [`CSVLoader`](https://python.langchain.com/docs/integrations/document_loaders/csv) -- [`CharacterTextSplitter`](https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter) -- [`OpenAIEmbedding`](https://python.langchain.com/docs/integrations/text_embedding/openai) -- [`Chroma`](https://python.langchain.com/docs/integrations/vectorstores/chroma) -- [`VectorStoreInfo`](https://python.langchain.com/docs/modules/data_connection/vectorstores/) -- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) -- [`VectorStoreAgent`](https://js.langchain.com/docs/modules/agents/tools/how_to/agents_with_vectorstores) - - diff --git a/docs/docs/examples/flow-runner.mdx b/docs/docs/examples/flow-runner.mdx deleted file mode 100644 index fda7a8d39..000000000 --- a/docs/docs/examples/flow-runner.mdx +++ /dev/null @@ -1,368 +0,0 @@ ---- -description: Custom Components -hide_table_of_contents: true ---- - -# FlowRunner Component - -The CustomComponent class allows us to create components that interact with Langflow itself. In this example, we will make a component that runs other flows available in "My Collection". - - - -We will cover how to: - -- List Collection flows using the _`list_flows`_ method. -- Load a flow using the _`load_flow`_ method. -- Configure a dropdown input field using the _`options`_ parameter. - -
- -Example Code - -```python -from langflow.custom import CustomComponent -from langchain.schema import Document - -class FlowRunner(CustomComponent): - display_name = "Flow Runner" - description = "Run other flows using a document as input." - - def build_config(self): - flows = self.list_flows() - flow_names = [f.name for f in flows] - return {"flow_name": {"options": flow_names, - "display_name": "Flow Name", - }, - "document": {"display_name": "Document"} - } - - - def build(self, flow_name: str, document: Document) -> Document: - # List the flows - flows = self.list_flows() - # Get the flow that matches the selected name - # You can also get the flow by id - # using self.get_flow(flow_id=flow_id) - tweaks = {} - flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) - # Get the page_content from the document - if document and isinstance(document, list): - document = document[0] - page_content = document.page_content - # Use it in the flow - result = flow(page_content) - return Document(page_content=str(result)) - -``` - -
- - - -```python -from langflow.custom import CustomComponent - - -class MyComponent(CustomComponent): - display_name = "Custom Component" - description = "This is a custom component" - - def build_config(self): - ... - - def build(self): - ... - -``` - -The typical structure of a Custom Component is composed of _`display_name`_ and _`description`_ attributes, _`build`_ and _`build_config`_ methods. - ---- - -```python -from langflow.custom import CustomComponent - - -# focus -class FlowRunner(CustomComponent): - # focus - display_name = "Flow Runner" - # focus - description = "Run other flows" - - def build_config(self): - ... - - def build(self): - ... - -``` - -Let's start by defining our component's _`display_name`_ and _`description`_. - ---- - -```python -from langflow.custom import CustomComponent -# focus -from langchain.schema import Document - - -class FlowRunner(CustomComponent): - display_name = "Flow Runner" - description = "Run other flows using a document as input." - - def build_config(self): - ... - - def build(self): - ... - -``` - -Second, we will import _`Document`_ from the [_langchain.schema_](https://docs.langchain.com/docs/components/schema/) module. This will be the return type of the _`build`_ method. - ---- - -```python -from langflow.custom import CustomComponent -# focus -from langchain.schema import Document - - -class FlowRunner(CustomComponent): - display_name = "Flow Runner" - description = "Run other flows using a document as input." - - def build_config(self): - ... - - # focus - def build(self, flow_name: str, document: Document) -> Document: - ... - -``` - -Now, let's add the [parameters](focus://11[20:55]) and the [return type](focus://11[60:69]) to the _`build`_ method. The parameters added are: - -- _`flow_name`_ is the name of the flow we want to run. -- _`document`_ is the input document to be passed to that flow. 
- - Since _`Document`_ is a Langchain type, it will add an input [handle](../administration/components) to the component ([see more](../components/custom)). - ---- - -```python focus=13:14 -from langflow.custom import CustomComponent -from langchain.schema import Document - - -class FlowRunner(CustomComponent): - display_name = "Flow Runner" - description = "Run other flows using a document as input." - - def build_config(self): - ... - - def build(self, flow_name: str, document: Document) -> Document: - # List the flows - flows = self.list_flows() - -``` - -We can now start writing the _`build`_ method. Let's list available flows in "My Collection" using the _`list_flows`_ method. - ---- - -```python focus=15:18 -from langflow.custom import CustomComponent -from langchain.schema import Document - - -class FlowRunner(CustomComponent): - display_name = "Flow Runner" - description = "Run other flows using a document as input." - - def build_config(self): - ... - - def build(self, flow_name: str, document: Document) -> Document: - # List the flows - flows = self.list_flows() - # Get the flow that matches the selected name - # You can also get the flow by id - # using self.get_flow(flow_id=flow_id) - tweaks = {} - flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) - -``` - -And retrieve a flow that matches the selected name (we'll make a dropdown input field for the user to choose among flow names). - - - From version 0.4.0, names are unique, which was not the case in previous - versions. This might lead to unexpected results if using flows with the same - name. - - ---- - -```python -from langflow.custom import CustomComponent -from langchain.schema import Document - - -class FlowRunner(CustomComponent): - display_name = "Flow Runner" - description = "Run other flows using a document as input." - - def build_config(self): - ... 
- - def build(self, flow_name: str, document: Document) -> Document: - # List the flows - flows = self.list_flows() - # Get the flow that matches the selected name - # You can also get the flow by id - # using self.get_flow(flow_id=flow_id) - tweaks = {} - flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) - - -``` - -You can load this flow using _`get_flow`_ and set a _`tweaks`_ dictionary to customize it. Find more about tweaks in our [features guidelines](../administration/features#code). - ---- - -```python -from langflow.custom import CustomComponent -from langchain.schema import Document - - -class FlowRunner(CustomComponent): - display_name = "Flow Runner" - description = "Run other flows using a document as input." - - def build_config(self): - ... - - def build(self, flow_name: str, document: Document) -> Document: - # List the flows - flows = self.list_flows() - # Get the flow that matches the selected name - # You can also get the flow by id - # using self.get_flow(flow_id=flow_id) - tweaks = {} - flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) - # Get the page_content from the document - if document and isinstance(document, list): - document = document[0] - page_content = document.page_content - # Use it in the flow - result = flow(page_content) - return Document(page_content=str(result)) -``` - -We are using a _`Document`_ as input because it is a straightforward way to pass text data in Langflow (specifically because you can connect it to many [loaders](../components/loaders)). -Generally, a flow will take a string or a dictionary as input because that's what LangChain components expect. -In case you are passing a dictionary, you need to build it according to the needs of the flow you are using. - -The content of a document can be extracted using the _`page_content`_ attribute, which is a string, and passed as an argument to the selected flow. 
- ---- - -```python focus=9:16 -from langflow.custom import CustomComponent -from langchain.schema import Document - - -class FlowRunner(CustomComponent): - display_name = "Flow Runner" - description = "Run other flows using a document as input." - - def build_config(self): - flows = self.list_flows() - flow_names = [f.name for f in flows] - return {"flow_name": {"options": flow_names, - "display_name": "Flow Name", - }, - "document": {"display_name": "Document"} - } - - def build(self, flow_name: str, document: Document) -> Document: - # List the flows - flows = self.list_flows() - # Get the flow that matches the selected name - # You can also get the flow by id - # using self.get_flow(flow_id=flow_id) - tweaks = {} - flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) - # Get the page_content from the document - if document and isinstance(document, list): - document = document[0] - page_content = document.page_content - # Use it in the flow - result = flow(page_content) - return Document(page_content=str(result)) -``` - -Finally, we can add field customizations through the _`build_config`_ method. Here we added the _`options`_ key to make the _`flow_name`_ field a dropdown menu. Check out the [custom component reference](../components/custom) for a list of available keys. - - - Make sure that the field type is _`str`_ and _`options`_ values are strings. - - - - -Done! This is what our script and custom component looks like: - -
- - - - - -
- -import ZoomableImage from "/src/theme/ZoomableImage.js"; -import Admonition from "@theme/Admonition"; diff --git a/docs/docs/examples/pass.mdx b/docs/docs/examples/pass.mdx new file mode 100644 index 000000000..ddfe35cca --- /dev/null +++ b/docs/docs/examples/pass.mdx @@ -0,0 +1,17 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Pass + +Sometimes all you need to do is… nothing! + +The **Pass** component enables you to ignore one input and move forward with another one. This is super helpful to swap routes for A/B testing! + +
+ +
diff --git a/docs/docs/examples/python-function.mdx b/docs/docs/examples/python-function.mdx deleted file mode 100644 index 2bb4b93e1..000000000 --- a/docs/docs/examples/python-function.mdx +++ /dev/null @@ -1,62 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# Python Function - -Langflow allows you to create a customized tool using the `PythonFunction` connected to a `Tool` component. In this example, Regex is used in Python to validate a pattern. - -```python -import re - -def is_brazilian_zipcode(zipcode: str) -> bool: - pattern = r"\d{5}-?\d{3}" - - # Check if the zip code matches the pattern - if re.match(pattern, zipcode): - return True - - return False -``` - - - When a tool is called, it is often desirable to have its output returned - directly to the user. You can do this by setting the **return_direct** flag - for a tool to be True. - - -The `AgentInitializer` component is a quick way to construct an agent from the model and tools. - - - The `PythonFunction` is a custom component that uses the LangChain 🦜🔗 tool - decorator. Learn more about it - [here](https://python.langchain.com/docs/modules/agents/tools/custom_tools). 
- - -## ⛓️ Langflow Example - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - - - -#### Download Flow - - - -- [`PythonFunctionTool`](https://python.langchain.com/docs/modules/agents/tools/custom_tools) -- [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) -- [`AgentInitializer`](https://python.langchain.com/docs/modules/agents/) - - diff --git a/docs/docs/examples/searchapi-tool.mdx b/docs/docs/examples/searchapi-tool.mdx deleted file mode 100644 index d3cb4734a..000000000 --- a/docs/docs/examples/searchapi-tool.mdx +++ /dev/null @@ -1,52 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# SearchApi Tool - -The [SearchApi](https://www.searchapi.io/) allows developers to retrieve results from search engines such as Google, Google Scholar, YouTube, YouTube transcripts, and more, and can be used as in Langflow through the `SearchApi` tool. - - - To use the SearchApi, you must first obtain an API key by registering at [SearchApi's website](https://www.searchapi.io/). - - -In the given example, we specify `engine` as `youtube_transcripts` and provide a `video_id`. - - - All engines and parameters can be found in [SearchApi documentation](https://www.searchapi.io/docs/google). - - -The `RetrievalQA` chain processes a `Document` along with a user's question to return an answer. - - - In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the - LLM, but feel free to experiment with other Language Models! - - -The `RetrievalQA` takes `CombineDocsChain` and `SearchApi` tool as inputs, using the tool as a `Document` to answer questions. - - - Learn more about the SearchApi - [here](https://python.langchain.com/docs/integrations/tools/searchapi). 
- - -## ⛓️ Langflow Example - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - - - -#### Download Flow - - - -- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) -- [`SearchApiAPIWrapper`](https://python.langchain.com/docs/integrations/providers/searchapi#wrappers) -- [`ZeroShotAgent`](https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent) - - \ No newline at end of file diff --git a/docs/docs/examples/serp-api-tool.mdx b/docs/docs/examples/serp-api-tool.mdx deleted file mode 100644 index 175b6f1be..000000000 --- a/docs/docs/examples/serp-api-tool.mdx +++ /dev/null @@ -1,58 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# Serp API Tool - -The [Serp API](https://serpapi.com/) (Search Engine Results Page) allows developers to scrape results from search engines such as Google, Bing and Yahoo, and can be used as in Langflow through the `Search` component. - - - To use the Serp API, you first need to sign up [Serp - API](https://serpapi.com/) for an API key on the provider's website. - - -Here, the `ZeroShotPrompt` component specifies a prompt template for the `ZeroShotAgent`. Set a _Prefix_ and _Suffix_ with rules for the agent to obey. In the example, we used default templates. - -The `LLMChain` is a simple chain that takes in a prompt template, formats it with the user input, and returns the response from an LLM. - - - In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the - LLM, but feel free to experiment with other Language Models! - - -The `ZeroShotAgent` takes the `LLMChain` and the `Search` tool as inputs, using the tool to find information when necessary. - - - Learn more about the Serp API - [here](https://python.langchain.com/docs/integrations/providers/serpapi ). 
- - -## ⛓️ Langflow Example - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - - - -#### Download Flow - - - -- [`ZeroShotPrompt`](https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/) -- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) -- [`LLMChain`](https://python.langchain.com/docs/modules/chains/foundational/llm_chain) -- [`Search`](https://python.langchain.com/docs/integrations/providers/serpapi) -- [`ZeroShotAgent`](https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent) - - diff --git a/docs/docs/examples/store-message.mdx b/docs/docs/examples/store-message.mdx new file mode 100644 index 000000000..75ff0bd46 --- /dev/null +++ b/docs/docs/examples/store-message.mdx @@ -0,0 +1,17 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Store Message + +The **Store Message** component allows you to save information under a specified Session ID and sender type. + +The **Message History** component can then be used to retrieve stored messages. + +
+ +
diff --git a/docs/docs/examples/sub-flow.mdx b/docs/docs/examples/sub-flow.mdx new file mode 100644 index 000000000..d2b9674ad --- /dev/null +++ b/docs/docs/examples/sub-flow.mdx @@ -0,0 +1,15 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Sub Flow + +The **Sub Flow** component enables a user to select a previously built flow and dynamically generate a component out of it. + +
+ +
diff --git a/docs/docs/examples/text-operator.mdx b/docs/docs/examples/text-operator.mdx new file mode 100644 index 000000000..50d52fdbf --- /dev/null +++ b/docs/docs/examples/text-operator.mdx @@ -0,0 +1,15 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Text Operator + +The **Text Operator** component simplifies logic. It evaluates the results from another component (for example, if the input text exactly equals `Tuna`) and runs another component based on the results. Basically, the text operator is an if/else component for your flow. + +
+ +
diff --git a/docs/docs/getting-started/canvas.mdx b/docs/docs/getting-started/canvas.mdx new file mode 100644 index 000000000..5974f245b --- /dev/null +++ b/docs/docs/getting-started/canvas.mdx @@ -0,0 +1,282 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# 🎨 Langflow Canvas + +The **Langflow canvas** is the central hub of Langflow, where you'll assemble new flows from components, run them, and see the results. + +To get a feel for the canvas, we'll examine a basic prompting flow. +You can either build this flow yourself, or select **New Project** > **Basic prompting** to open a canvas with the flow pre-built. + + + +## Flows, components, collections, and projects + +A [flow](#flow) is a pipeline of components connected together in the Langflow canvas. + +A [component](#component) is a single building block within a flow. A component has inputs, outputs, and parameters that define its functionality. + +A [collection](#collection) is a snapshot of the flows available in your database. Collections can be downloaded to local storage and uploaded for future use. + +A [project](#project) can be a component or a flow. Projects are saved as part of your collection. + +For example, the **OpenAI LLM** is a **component** of the **Basic prompting** flow, and the **flow** is stored in a **collection**. + +## Flow + +A **flow** is a pipeline of components connected together in the Langflow canvas. + +For example, the [Basic prompting](../starter-projects/basic-prompting.mdx) flow is a pipeline of four components: + + + +In this flow, the **OpenAI LLM component** receives input (left side) and produces output (right side) - in this case, receiving input from the **Chat Input** and **Prompt** components and producing output to the **Chat Output** component. 
+ +## Component + +Components are the building blocks of flows. They consist of inputs, outputs, and parameters that define their functionality. These elements provide a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work in the LangChain [documentation](https://python.langchain.com/docs/integrations/components). + +
+ During the flow creation process, you will notice handles (colored circles)
+ attached to one or both sides of a component. These handles indicate the
+ component's availability to connect to other components. Hover over a handle to see
+ connection details.
+ +
+ For example, if you select a ConversationChain component, you + will see orange o and purple{" "} + o input handles. They indicate that + this component accepts an LLM and a Memory component as inputs. The red + asterisk * means that at least one input + of that type is required. +
+ +{" "} + + + +
+In the top right corner of the component, you'll find the component status icon (![Status icon](/logos/playbutton.svg)). +Build the flow by clicking the **![Playground icon](/logos/botmessage.svg)Playground** at the bottom right of the canvas. + +Once the validation is complete, the status of each validated component should turn green (![Status icon](/logos/greencheck.svg)). +To debug, hover over the component status to see the outputs. + +
+ +--- + +### Component Parameters + +Langflow components can be edited by clicking the component settings button. Hide parameters to reduce complexity and keep the canvas clean and intuitive for experimentation. + +
+ +
+ +### Component menu + +Each component is a little unique, but they will all have a menu bar on top that looks something like this. +The menu options are **Code**, **Save**, **Duplicate**, and **More**. + + + +### Code menu + +The **Code** button displays your component's Python code. +You can modify the code and save it. + +#### Save + +Save your component to the **Saved** components folder for re-use. + +#### Duplicate + +Duplicate your component in the canvas. + +#### More + +**Advanced** - modify the parameters of your component. + +
+ +
+ +**Copy** - copy your component. + +**Share** - share your component to the Langflow store. + +**Docs** - view documentation for your component. + +**Delete** - delete your component. + +### Group multiple components + +Components without input or output nodes can be grouped into a single component for re-use. +This is useful for combining large flows into single components (like RAG with a vector database, for example) and saves space in the canvas. + +1. Hold **Shift** and drag to select the **Prompt** and **OpenAI** components. +2. Select **Group**. +3. The components merge into a single component. +4. To save the new component, select **Save**. It can now be re-used from the **Saved** components folder. + +## Playground + +Run your flow by clicking the **![Playground icon](/logos/botmessage.svg)Playground** button. + +For more, see [Playground](../administration/playground.mdx). + +## API + +The **API** button opens the API window, where Langflow presents code for integrating your flow into external applications. + +Modify the call's parameters in the **Tweaks** window, click the **Copy Code** or **Download** buttons, and paste your code where you want to use it. + + + +### curl + +The **curl** tab displays sample code for posting a query to your flow. +Modify the `input_value` to change your input message. + +```curl +curl -X POST \ + http://127.0.0.1:7863/api/v1/run/f2eefd80-bb91-4190-9279-0d6ffafeaac4\?stream\=false \ + -H 'Content-Type: application/json'\ + -d '{"input_value": "is anybody there?", + "output_type": "chat", + "input_type": "chat", + "tweaks": { + "Prompt-uxBqP": {}, + "OpenAIModel-k39HS": {}, + "ChatOutput-njtka": {}, + "ChatInput-P3fgL": {} +}}' +``` + +Result: + +``` +{"session_id":"f2eefd80-bb91-4190-9279-0d6ffafeaac4:53856a772b8e1cfcb3dd2e71576b5215399e95bae318d3c02101c81b7c252da3","outputs":[{"inputs":{"input_value":"is anybody there?"},"outputs":[{"results":{"result":"Arrr, me hearties! Aye, this be Captain [Your Name] speakin'. 
What be ye needin', matey?"},"artifacts":{"message":"Arrr, me hearties! Aye, this be Captain [Your Name] speakin'. What be ye needin', matey?","sender":"Machine","sender_name":"AI"},"messages":[{"message":"Arrr, me hearties! Aye, this be Captain [Your Name] speakin'. What be ye needin', matey?","sender":"Machine","sender_name":"AI","component_id":"ChatOutput-njtka"}],"component_display_name":"Chat Output","component_id":"ChatOutput-njtka"}]}]}% +``` + +### Python API + +The **Python API** tab displays code to interact with your flow using the Python HTTP requests library. + +### Python Code + +The **Python Code** tab displays code to interact with your flow's `.json` file using the Langflow runtime. + +### Chat Widget HTML + +The **Chat Widget HTML** tab displays code that can be inserted in the `` of your HTML to interact with your flow. +For more, see the [Chat widget documentation](../administration/chat-widget.mdx). + +### Tweaks + +The **Tweaks** tab displays the available parameters for your flow. +Modifying the parameters changes the code parameters across all windows. +For example, changing the **Chat Input** component's `input_value` will change that value across all API calls. + +
+ +
+ +## Collection + +A collection is a snapshot of flows available in a database. + +Collections can be downloaded to local storage and uploaded for future use. + +
+ +
+ +## Project + +A **Project** can be a flow or a component. To view your saved projects, select **My Collection**. + +Your **Projects** are displayed. + +Click the **![Playground icon](/logos/botmessage.svg) Playground** button to run a flow from the **My Collection** screen. + +In the top left corner of the screen are options for **Download Collection**, **Upload Collection**, and **New Project**. + +Select **Download Collection** to save your project to your local machine. This downloads all flows and components as a `.json` file. + +Select **Upload Collection** to upload a flow or component `.json` file from your local machine. + +Select **New Project** to create a new project. In addition to a blank canvas, [starter projects](../starter-projects/basic-prompting.mdx) are also available. + +## Project options menu + +To see options for your project, in the upper left corner of the canvas, select the dropdown menu. + + + +**New** - Start a new project. + +**Duplicate** - Duplicate the current flow as a new project. + +**Settings** - Modify the project's **Name** or **Description**. + +**Import** - Upload a flow `.json` file from your local machine. + +**Export** - Download your current project to your local machine as a `.json` file. + +**Undo** or **Redo** - Undo or redo your last action. diff --git a/docs/docs/getting-started/flows-components-collections.mdx b/docs/docs/getting-started/flows-components-collections.mdx new file mode 100644 index 000000000..335fb5c12 --- /dev/null +++ b/docs/docs/getting-started/flows-components-collections.mdx @@ -0,0 +1,20 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; + +# 🖥️ Flows, components, collections, and projects + +## TL;DR + +A [flow](#flow) is a pipeline of components connected together in the Langflow canvas. 
+ +A [component](#component) is a single building block within a flow. A component has inputs, outputs, and parameters that define its functionality. + +A [collection](#collection) is a snapshot of the flows available in your database. Collections can be downloaded to local storage and uploaded for future use. + +A [project](#project) can be a component or a flow. Projects are saved as part of your collection. + +For example, the **OpenAI LLM** is a **component** of the **Basic prompting** flow, and the **flow** is stored in a **collection**. + +## Component diff --git a/docs/docs/getting-started/huggingface-spaces.mdx b/docs/docs/getting-started/huggingface-spaces.mdx deleted file mode 100644 index d2eccc782..000000000 --- a/docs/docs/getting-started/huggingface-spaces.mdx +++ /dev/null @@ -1,38 +0,0 @@ -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; -import Admonition from "@theme/Admonition"; - -# 🤗 HuggingFace Spaces - -HuggingFace provides a great alternative for running Langflow in their Spaces environment. This means you can run Langflow without any local installation required. - -In a Chromium-based browser, go to the [Langflow Space](https://huggingface.co/spaces/Langflow/Langflow?duplicate=true) or [Langflow v1.0 alpha Preview Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). - -You'll be presented with the following screen: - - - -Name your Space, define the visibility (Public or Private), and click on **Duplicate Space** to start the installation process. When installation is finished, you'll be redirected to the Space's main page to start using Langflow right away! - -## Run a starter project - -Langflow provides a range of example flows to help you get started. - -Once you get Langflow running in your Space, click on **New Project** in the top right corner of the screen. 
- -Select a starter project from the list, set up your API keys, and click ⚡ Run. This will open up Langflow's Interaction Panel with the chat console, text inputs, and outputs ready to go. - -For more information on the starter projects, see the guides below: - -* [Basic prompting](/starter-projects/basic-prompting.mdx) -* [Memory chatbot](/starter-projects/memory-chatbot.mdx) -* [Blog writer](/starter-projects/blog-writer.mdx) -* [Document QA](/starter-projects/document-qa.mdx) \ No newline at end of file diff --git a/docs/docs/getting-started/install-langflow.mdx b/docs/docs/getting-started/install-langflow.mdx index 7a567d597..4beb5e362 100644 --- a/docs/docs/getting-started/install-langflow.mdx +++ b/docs/docs/getting-started/install-langflow.mdx @@ -6,35 +6,40 @@ import Admonition from "@theme/Admonition"; # 📦 Install Langflow - - Langflow v1.0 alpha is also available in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). Try it out or follow the instructions [here](./huggingface-spaces) to install it locally. - + Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space + using this + link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true), + to create your own Langflow workspace in minutes. -Langflow requires [Python 3.10](https://www.python.org/downloads/release/python-3100/) and [pip](https://pypi.org/project/pip/) or [pipx](https://pipx.pypa.io/stable/installation/) to be installed on your system. +Langflow requires [Python >=3.10](https://www.python.org/downloads/release/python-3100/) and [pip](https://pypi.org/project/pip/) or [pipx](https://pipx.pypa.io/stable/installation/) to be installed on your system. 
Install Langflow with pip: + ```bash python -m pip install langflow -U ``` Install Langflow with pipx: + ```bash pipx install langflow --python python3.10 --fetch-missing-python ``` -Pipx can fetch the missing Python version for you with `--fetch-missing-python`, but you can also install the Python version manually. +Pipx can fetch the missing Python version for you with `--fetch-missing-python`, but you can also install the Python version manually. ## Install Langflow pre-release To install a pre-release version of Langflow: pip: + ```bash python -m pip install langflow --pre --force-reinstall ``` pipx: + ```bash pipx install langflow --python python3.10 --fetch-missing-python --pip-args="--pre --force-reinstall" ``` @@ -54,11 +59,13 @@ python -m langflow --help ## ⛓️ Run Langflow 1. To run Langflow, enter the following command. + ```bash python -m langflow run ``` 2. Confirm that a local Langflow instance starts by visiting `http://127.0.0.1:7860` in a Chromium-based browser. + ```bash │ Welcome to ⛓ Langflow │ │ │ @@ -66,4 +73,23 @@ python -m langflow run │ Collaborate, and contribute at our GitHub Repo 🚀 │ ``` -3. Continue on to the [Quickstart](./quickstart.mdx). \ No newline at end of file +3. Continue on to the [Quickstart](./quickstart.mdx). + +## HuggingFace Spaces + +HuggingFace provides a great alternative for running Langflow in their Spaces environment. This means you can run Langflow without any local installation required. + +In a Chromium-based browser, go to the [Langflow Space](https://huggingface.co/spaces/Langflow/Langflow?duplicate=true) or [Langflow v1.0 alpha Preview Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). + +You'll be presented with the following screen: + + + +Name your Space, define the visibility (Public or Private), and click on **Duplicate Space** to start the installation process. When installation is finished, you'll be redirected to the Space's main page to start using Langflow right away! 
diff --git a/docs/docs/getting-started/quickstart.mdx b/docs/docs/getting-started/quickstart.mdx index f54a07747..3f02db27f 100644 --- a/docs/docs/getting-started/quickstart.mdx +++ b/docs/docs/getting-started/quickstart.mdx @@ -10,12 +10,15 @@ This guide demonstrates how to build a basic prompt flow and modify that prompt ## Prerequisites -* [Langflow installed](./install-langflow.mdx) +- [Langflow installed and running](./install-langflow.mdx) -* [OpenAI API key](https://platform.openai.com) +- [OpenAI API key](https://platform.openai.com) - Langflow v1.0 alpha is also available in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). Try it out or follow the instructions [here](./huggingface-spaces) to install it locally. + Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space + using this + link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) + to create your own Langflow workspace in minutes. ## Hello World - Basic Prompting @@ -44,25 +47,25 @@ Examine the **Prompt** component. The **Template** field instructs the LLM to `A This should be interesting... 4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**. - 1. In the **Variable Name** field, enter `openai_api_key`. - 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). - 3. Click **Save Variable**. + 1. In the **Variable Name** field, enter `openai_api_key`. + 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). + 3. Click **Save Variable**. ## Run the basic prompting flow 1. Click the **Run** button. -The **Interaction Panel** opens, where you can chat with your bot. + The **Interaction Panel** opens, where you can chat with your bot. 2. Type a message and press Enter. -And... Ahoy! 🏴‍☠️ -The bot responds in a piratical manner! + And... Ahoy! 🏴‍☠️ + The bot responds in a piratical manner! 
## Modify the prompt for a different result 1. To modify your prompt results, in the **Prompt** template, click the **Template** field. -The **Edit Prompt** window opens. + The **Edit Prompt** window opens. 2. Change `Answer the user as if you were a pirate` to a different character, perhaps `Answer the user as if you were Harold Abelson.` 3. Run the basic prompting flow again. -The response will be markedly different. + The response will be markedly different. ## Next steps @@ -72,8 +75,6 @@ By adding Langflow components to your flow, you can create all sorts of interest Here are a couple of examples: -* [Memory chatbot](/starter-projects/memory-chatbot.mdx) -* [Blog writer](/starter-projects/blog-writer.mdx) -* [Document QA](/starter-projects/document-qa.mdx) - - +- [Memory chatbot](/starter-projects/memory-chatbot.mdx) +- [Blog writer](/starter-projects/blog-writer.mdx) +- [Document QA](/starter-projects/document-qa.mdx) diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx index e807ee086..5ccb8d7a0 100644 --- a/docs/docs/index.mdx +++ b/docs/docs/index.mdx @@ -26,9 +26,14 @@ Its intuitive interface allows for easy manipulation of AI building blocks, enab - [Quickstart](/getting-started/quickstart) - Create a flow and run it. -- [HuggingFace Spaces](/getting-started/huggingface-spaces) - Duplicate the Langflow preview space and try it out before you install. +- [Langflow Canvas](/getting-started/canvas) - Learn more about the Langflow canvas. -- [New to LLMs?](/getting-started/new-to-llms) - Learn more about LLMs, prompting, and more at [promptingguide.ai](https://promptingguide.ai). + + Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space + using this + link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) + to create your own Langflow workspace in minutes. 
+ ## Learn more about Langflow 1.0 diff --git a/docs/docs/integrations/notion/add-content-to-page.md b/docs/docs/integrations/notion/add-content-to-page.md new file mode 100644 index 000000000..243c09d81 --- /dev/null +++ b/docs/docs/integrations/notion/add-content-to-page.md @@ -0,0 +1,138 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Add Content To Page + +The `AddContentToPage` component converts markdown text to Notion blocks and appends them to a Notion page. + +[Notion Reference](https://developers.notion.com/reference/patch-block-children) + + + +The `AddContentToPage` component enables you to: + +- Convert markdown text to Notion blocks. +- Append the converted blocks to a specified Notion page. +- Seamlessly integrate Notion content creation into Langflow workflows. + + +## Component Usage + +To use the `AddContentToPage` component in a Langflow flow: + +1. **Add the `AddContentToPage` component** to your flow. +2. **Configure the component** by providing: + - `markdown_text`: The markdown text to convert. + - `block_id`: The ID of the Notion page/block to append the content. + - `notion_secret`: The Notion integration token for authentication. +3. **Connect the component** to other nodes in your flow as needed. +4. **Run the flow** to convert the markdown text and append it to the specified Notion page. + +## Component Python Code + +```python +import json +from typing import Optional + +import requests +from langflow.custom import CustomComponent + + +class NotionPageCreator(CustomComponent): + display_name = "Create Page [Notion]" + description = "A component for creating Notion pages." 
+ documentation: str = "https://docs.langflow.org/integrations/notion/add-content-to-page" + icon = "NotionDirectoryLoader" + + def build_config(self): + return { + "database_id": { + "display_name": "Database ID", + "field_type": "str", + "info": "The ID of the Notion database.", + }, + "notion_secret": { + "display_name": "Notion Secret", + "field_type": "str", + "info": "The Notion integration token.", + "password": True, + }, + "properties": { + "display_name": "Properties", + "field_type": "str", + "info": "The properties of the new page. Depending on your database setup, this can change. E.G: {'Task name': {'id': 'title', 'type': 'title', 'title': [{'type': 'text', 'text': {'content': 'Send Notion Components to LF', 'link': null}}]}}", + }, + } + + def build( + self, + database_id: str, + notion_secret: str, + properties: str = '{"Task name": {"id": "title", "type": "title", "title": [{"type": "text", "text": {"content": "Send Notion Components to LF", "link": null}}]}}', + ) -> str: + if not database_id or not properties: + raise ValueError("Invalid input. Please provide 'database_id' and 'properties'.") + + headers = { + "Authorization": f"Bearer {notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + } + + data = { + "parent": {"database_id": database_id}, + "properties": json.loads(properties), + } + + response = requests.post("https://api.notion.com/v1/pages", headers=headers, json=data) + + if response.status_code == 200: + page_id = response.json()["id"] + self.status = f"Successfully created Notion page with ID: {page_id}\n {str(response.json())}" + return response.json() + else: + error_message = f"Failed to create Notion page. 
Status code: {response.status_code}, Error: {response.text}" + self.status = error_message + raise Exception(error_message) +``` + +## Example Usage + + + +Example of using the `AddContentToPage` component in a Langflow flow using Markdown as input: + + + +In this example, the `AddContentToPage` component connects to a `MarkdownLoader` component to provide the markdown text input. The converted Notion blocks are appended to the specified Notion page using the provided `block_id` and `notion_secret`. + + + +## Best Practices + +When using the `AddContentToPage` component: + +- Ensure markdown text is well-formatted. +- Verify the `block_id` corresponds to the right Notion page/block. +- Keep your Notion integration token secure. +- Test with sample markdown text before production use. + +The `AddContentToPage` component is a powerful tool for integrating Notion content creation into Langflow workflows, facilitating easy conversion of markdown text to Notion blocks and appending them to specific pages. + +## Troubleshooting + +If you encounter any issues while using the `AddContentToPage` component, consider the following: + +- Verify the Notion integration token’s validity and permissions. +- Check the Notion API documentation for updates. +- Ensure markdown text is properly formatted. +- Double-check the `block_id` for correctness. diff --git a/docs/docs/integrations/notion/intro.md b/docs/docs/integrations/notion/intro.md new file mode 100644 index 000000000..293038d4f --- /dev/null +++ b/docs/docs/integrations/notion/intro.md @@ -0,0 +1,43 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Introduction to Notion in Langflow + +The Notion integration in Langflow enables seamless connectivity with Notion databases, pages, and users, facilitating automation and improving productivity. 
+ + + +#### Download Notion Components Bundle + +### Key Features of Notion Integration in Langflow + +- **List Pages**: Retrieve a list of pages from a Notion database and access data stored in your Notion workspace. +- **List Database Properties**: Obtain insights into the properties of a Notion database, allowing for easy understanding of its structure and metadata. +- **Add Page Content**: Programmatically add new content to a Notion page, simplifying the creation and updating of pages. +- **List Users**: Retrieve a list of users with access to a Notion workspace, aiding in user management and collaboration. +- **Update Property**: Update the value of a specific property in a Notion page, enabling easy modification and maintenance of Notion data. + +### Potential Use Cases for Notion Integration in Langflow + +- **Task Automation**: Automate task creation in Notion using Langflow's AI capabilities. Describe the required tasks, and they will be automatically created and updated in Notion. +- **Context Extraction from Meetings**: Leverage AI to analyze meeting contexts, extract key points, and update the relevant Notion pages automatically. +- **Content Creation**: Utilize AI to generate ideas, suggest templates, and populate Notion pages with relevant data, enhancing content management efficiency. + +### Getting Started with Notion Integration in Langflow + +1. **Set Up Notion Integration**: Follow the guide [Setting up a Notion App](./setup) to set up a Notion integration in your workspace. +2. **Configure Notion Components**: Provide the necessary authentication details and parameters to configure the Notion components in your Langflow flows. +3. **Connect Components**: Integrate Notion components with other Langflow components to build your workflow. +4. **Test and Refine**: Ensure your Langflow flow operates as intended by testing and refining it. +5. **Deploy and Run**: Deploy your Langflow flow to automate Notion-related tasks and processes. 
+ +The Notion integration in Langflow offers a powerful toolset for automation and productivity enhancement. Whether managing tasks, extracting meeting insights, or creating content, Langflow and Notion provide robust solutions for streamlining workflows. diff --git a/docs/docs/integrations/notion/list-database-properties.md b/docs/docs/integrations/notion/list-database-properties.md new file mode 100644 index 000000000..c41159893 --- /dev/null +++ b/docs/docs/integrations/notion/list-database-properties.md @@ -0,0 +1,117 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Database Properties + +The `NotionDatabaseProperties` component retrieves properties of a Notion database. It provides a convenient way to integrate Notion database information into your Langflow workflows. + +[Notion Reference](https://developers.notion.com/reference/post-database-query) + + +The `NotionDatabaseProperties` component enables you to: +- Retrieve properties of a Notion database +- Access the retrieved properties in your Langflow flows +- Integrate Notion database information seamlessly into your workflows + + +## Component Usage + +To use the `NotionDatabaseProperties` component in a Langflow flow, follow these steps: + +1. Add the `NotionDatabaseProperties` component to your flow. +2. Configure the component by providing the required inputs: + - `database_id`: The ID of the Notion database you want to retrieve properties from. + - `notion_secret`: The Notion integration token for authentication. +3. Connect the output of the `NotionDatabaseProperties` component to other components in your flow as needed. 
+ +## Component Python code + +```python +import requests +from typing import Dict + +from langflow import CustomComponent +from langflow.schema import Record + + +class NotionDatabaseProperties(CustomComponent): + display_name = "List Database Properties [Notion]" + description = "Retrieve properties of a Notion database." + documentation: str = "https://docs.langflow.org/integrations/notion/list-database-properties" + icon = "NotionDirectoryLoader" + + def build_config(self): + return { + "database_id": { + "display_name": "Database ID", + "field_type": "str", + "info": "The ID of the Notion database.", + }, + "notion_secret": { + "display_name": "Notion Secret", + "field_type": "str", + "info": "The Notion integration token.", + "password": True, + }, + } + + def build( + self, + database_id: str, + notion_secret: str, + ) -> Record: + url = f"https://api.notion.com/v1/databases/{database_id}" + headers = { + "Authorization": f"Bearer {notion_secret}", + "Notion-Version": "2022-06-28", # Use the latest supported version + } + + response = requests.get(url, headers=headers) + response.raise_for_status() + + data = response.json() + properties = data.get("properties", {}) + + record = Record(text=str(response.json()), data=properties) + self.status = f"Retrieved {len(properties)} properties from the Notion database.\n {record.text}" + return record +``` + +## Example Usage + + +Here's an example of how you can use the `NotionDatabaseProperties` component in a Langflow flow: + + + +In this example, the `NotionDatabaseProperties` component retrieves the properties of a Notion database, and the retrieved properties are then used as input for subsequent components in the flow. + + +## Best Practices + +When using the `NotionDatabaseProperties` component, consider the following best practices: + +- Ensure that you have a valid Notion integration token with the necessary permissions to access the desired database. 
+- Double-check the database ID to avoid retrieving properties from the wrong database. +- Handle potential errors gracefully by checking the response status and providing appropriate error messages. + +The `NotionDatabaseProperties` component simplifies the process of retrieving properties from a Notion database and integrating them into your Langflow workflows. By leveraging this component, you can easily access and utilize Notion database information in your flows, enabling powerful integrations and automations. + +Feel free to explore the capabilities of the `NotionDatabaseProperties` component and experiment with different use cases to enhance your Langflow workflows! + +## Troubleshooting + +If you encounter any issues while using the `NotionDatabaseProperties` component, consider the following: + +- Verify that the Notion integration token is valid and has the required permissions. +- Check the database ID to ensure it matches the intended Notion database. +- Inspect the response from the Notion API for any error messages or status codes that may indicate the cause of the issue. diff --git a/docs/docs/integrations/notion/list-pages.md b/docs/docs/integrations/notion/list-pages.md new file mode 100644 index 000000000..ea1b04950 --- /dev/null +++ b/docs/docs/integrations/notion/list-pages.md @@ -0,0 +1,179 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# List Pages + +The `NotionListPages` component queries a Notion database with filtering and sorting. It provides a convenient way to integrate Notion database querying capabilities into your Langflow workflows. 
+ +[Notion Reference](https://developers.notion.com/reference/post-database-query) + + + The `NotionListPages` component enables you to: + +- Query a Notion database with custom filters and sorting options +- Retrieve specific pages from a Notion database based on the provided criteria +- Integrate Notion database data seamlessly into your Langflow workflows + + + +## Component Usage + +To use the `NotionListPages +` component in a Langflow flow, follow these steps: + +1. **Add the `NotionListPages +` component to your flow.** +2. **Configure the component by providing the required parameters:** + - `notion_secret`: The Notion integration token for authentication. + - `database_id`: The ID of the Notion database you want to query. + - `query_payload`: A JSON string containing the filters and sorting options for the query. +3. **Connect the `NotionListPages +` component to other components in your flow as needed.** + +## Component Python code + +```python +import requests +import json +from typing import Dict, Any, List +from langflow.custom import CustomComponent +from langflow.schema import Record + +class NotionListPages(CustomComponent): + display_name = "List Pages [Notion]" + description = ( + "Query a Notion database with filtering and sorting. " + "The input should be a JSON string containing the 'filter' and 'sorts' objects. 
" + "Example input:\n" + '{"filter": {"property": "Status", "select": {"equals": "Done"}}, "sorts": [{"timestamp": "created_time", "direction": "descending"}]}' + ) + documentation: str = "https://docs.langflow.org/integrations/notion/list-pages" + icon = "NotionDirectoryLoader" + + field_order = [ + "notion_secret", + "database_id", + "query_payload", + ] + + def build_config(self): + return { + "notion_secret": { + "display_name": "Notion Secret", + "field_type": "str", + "info": "The Notion integration token.", + "password": True, + }, + "database_id": { + "display_name": "Database ID", + "field_type": "str", + "info": "The ID of the Notion database to query.", + }, + "query_payload": { + "display_name": "Database query", + "field_type": "str", + "info": "A JSON string containing the filters that will be used for querying the database. EG: {'filter': {'property': 'Status', 'status': {'equals': 'In progress'}}}", + }, + } + + def build( + self, + notion_secret: str, + database_id: str, + query_payload: str = "{}", + ) -> List[Record]: + try: + query_data = json.loads(query_payload) + filter_obj = query_data.get("filter") + sorts = query_data.get("sorts", []) + + url = f"https://api.notion.com/v1/databases/{database_id}/query" + headers = { + "Authorization": f"Bearer {notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + } + + data = { + "sorts": sorts, + } + + if filter_obj: + data["filter"] = filter_obj + + response = requests.post(url, headers=headers, json=data) + response.raise_for_status() + + results = response.json() + records = [] + combined_text = f"Pages found: {len(results['results'])}\n\n" + for page in results['results']: + page_data = { + 'id': page['id'], + 'url': page['url'], + 'created_time': page['created_time'], + 'last_edited_time': page['last_edited_time'], + 'properties': page['properties'], + } + + text = ( + f"id: {page['id']}\n" + f"url: {page['url']}\n" + f"created_time: {page['created_time']}\n" + 
f"last_edited_time: {page['last_edited_time']}\n" + f"properties: {json.dumps(page['properties'], indent=2)}\n\n" + ) + + combined_text += text + records.append(Record(text=text, data=page_data)) + + self.status = combined_text.strip() + return records + + except Exception as e: + self.status = f"An error occurred: {str(e)}" + return [Record(text=self.status, data=[])] +``` + + + +## Example Usage + +Here's an example of how you can use the `NotionListPages` component in a Langflow flow and passing to the Prompt component: + + + +In this example, the `NotionListPages` component is used to retrieve specific pages from a Notion database based on the provided filters and sorting options. The retrieved data can then be processed further in the subsequent components of the flow. + + +## Best Practices + +When using the `NotionListPages +` component, consider the following best practices: + +- Ensure that you have a valid Notion integration token with the necessary permissions to query the desired database. +- Construct the `query_payload` JSON string carefully, following the Notion API documentation for filtering and sorting options. + +The `NotionListPages +` component provides a powerful way to integrate Notion database querying capabilities into your Langflow workflows. By leveraging this component, you can easily retrieve specific pages from a Notion database based on custom filters and sorting options, enabling you to build more dynamic and data-driven flows. + +We encourage you to explore the capabilities of the `NotionListPages +` component further and experiment with different querying scenarios to unlock the full potential of integrating Notion databases into your Langflow workflows. + +## Troubleshooting + +If you encounter any issues while using the `NotionListPages` component, consider the following: + +- Double-check that the `notion_secret` and `database_id` are correct and valid. 
+- Verify that the `query_payload` JSON string is properly formatted and contains valid filtering and sorting options. +- Check the Notion API documentation for any updates or changes that may affect the component's functionality. diff --git a/docs/docs/integrations/notion/list-users.md b/docs/docs/integrations/notion/list-users.md new file mode 100644 index 000000000..c22c20ca8 --- /dev/null +++ b/docs/docs/integrations/notion/list-users.md @@ -0,0 +1,127 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# User List + +The `NotionUserList` component retrieves users from Notion. It provides a convenient way to integrate Notion user data into your Langflow workflows. + +[Notion Reference](https://developers.notion.com/reference/get-users) + + + The `NotionUserList` component enables you to: + +- Retrieve user data from Notion +- Access user information such as ID, type, name, and avatar URL +- Integrate Notion user data seamlessly into your Langflow workflows + + +## Component Usage + +To use the `NotionUserList` component in a Langflow flow, follow these steps: + +1. Add the `NotionUserList` component to your flow. +2. Configure the component by providing the required Notion secret token. +3. Connect the component to other nodes in your flow as needed. + +## Component Python code + +```python +import requests +from typing import List + +from langflow import CustomComponent +from langflow.schema import Record + + +class NotionUserList(CustomComponent): + display_name = "List Users [Notion]" + description = "Retrieve users from Notion." 
+ documentation: str = "https://docs.langflow.org/integrations/notion/list-users" + icon = "NotionDirectoryLoader" + + def build_config(self): + return { + "notion_secret": { + "display_name": "Notion Secret", + "field_type": "str", + "info": "The Notion integration token.", + "password": True, + }, + } + + def build( + self, + notion_secret: str, + ) -> List[Record]: + url = "https://api.notion.com/v1/users" + headers = { + "Authorization": f"Bearer {notion_secret}", + "Notion-Version": "2022-06-28", + } + + response = requests.get(url, headers=headers) + response.raise_for_status() + + data = response.json() + results = data['results'] + + records = [] + for user in results: + id = user['id'] + type = user['type'] + name = user.get('name', '') + avatar_url = user.get('avatar_url', '') + + record_data = { + "id": id, + "type": type, + "name": name, + "avatar_url": avatar_url, + } + + output = "User:\n" + for key, value in record_data.items(): + output += f"{key.replace('_', ' ').title()}: {value}\n" + output += "________________________\n" + + record = Record(text=output, data=record_data) + records.append(record) + + self.status = "\n".join(record.text for record in records) + return records +``` + +## Example Usage + + +Here's an example of how you can use the `NotionUserList` component in a Langflow flow and passing the outputs to the Prompt component: + + + + + +## Best Practices + +When using the `NotionUserList` component, consider the following best practices: + +- Ensure that you have a valid Notion integration token with the necessary permissions to retrieve user data. +- Handle the retrieved user data securely and in compliance with Notion's API usage guidelines. + +The `NotionUserList` component provides a seamless way to integrate Notion user data into your Langflow workflows. By leveraging this component, you can easily retrieve and utilize user information from Notion, enhancing the capabilities of your Langflow applications. 
Feel free to explore and experiment with the `NotionUserList` component to unlock new possibilities in your Langflow projects! + +## Troubleshooting + +If you encounter any issues while using the `NotionUserList` component, consider the following: + +- Double-check that your Notion integration token is valid and has the required permissions. +- Verify that you have installed the necessary dependencies (`requests`) for the component to function properly. +- Check the Notion API documentation for any updates or changes that may affect the component's functionality. diff --git a/docs/docs/integrations/notion/page-content-viewer.md b/docs/docs/integrations/notion/page-content-viewer.md new file mode 100644 index 000000000..f4eeba052 --- /dev/null +++ b/docs/docs/integrations/notion/page-content-viewer.md @@ -0,0 +1,142 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Page Content + +The `NotionPageContent` component retrieves the content of a Notion page as plain text. It provides a convenient way to integrate Notion page content into your Langflow workflows. + +[Notion Reference](https://developers.notion.com/reference/get-page) + + + +The `NotionPageContent` component enables you to: + +- Retrieve the content of a Notion page as plain text +- Extract text from various block types, including paragraphs, headings, lists, and more +- Integrate Notion page content seamlessly into your Langflow workflows + + + +## Component Usage + +To use the `NotionPageContent` component in a Langflow flow, follow these steps: + +1. Add the `NotionPageContent` component to your flow. +2. Configure the component by providing the required inputs: + - `page_id`: The ID of the Notion page you want to retrieve. + - `notion_secret`: Your Notion integration token for authentication. +3. 
Connect the output of the `NotionPageContent` component to other components in your flow as needed. + +## Component Python code + +```python +import requests +from typing import Dict, Any + +from langflow import CustomComponent +from langflow.schema import Record + + +class NotionPageContent(CustomComponent): + display_name = "Page Content Viewer [Notion]" + description = "Retrieve the content of a Notion page as plain text." + documentation: str = "https://docs.langflow.org/integrations/notion/page-content-viewer" + icon = "NotionDirectoryLoader" + + def build_config(self): + return { + "page_id": { + "display_name": "Page ID", + "field_type": "str", + "info": "The ID of the Notion page to retrieve.", + }, + "notion_secret": { + "display_name": "Notion Secret", + "field_type": "str", + "info": "The Notion integration token.", + "password": True, + }, + } + + def build( + self, + page_id: str, + notion_secret: str, + ) -> Record: + blocks_url = f"https://api.notion.com/v1/blocks/{page_id}/children?page_size=100" + headers = { + "Authorization": f"Bearer {notion_secret}", + "Notion-Version": "2022-06-28", # Use the latest supported version + } + + # Retrieve the child blocks + blocks_response = requests.get(blocks_url, headers=headers) + blocks_response.raise_for_status() + blocks_data = blocks_response.json() + + # Parse the blocks and extract the content as plain text + content = self.parse_blocks(blocks_data["results"]) + + self.status = content + return Record(data={"content": content}, text=content) + + def parse_blocks(self, blocks: list) -> str: + content = "" + for block in blocks: + block_type = block["type"] + if block_type in ["paragraph", "heading_1", "heading_2", "heading_3", "quote"]: + content += self.parse_rich_text(block[block_type]["rich_text"]) + "\n\n" + elif block_type in ["bulleted_list_item", "numbered_list_item"]: + content += self.parse_rich_text(block[block_type]["rich_text"]) + "\n" + elif block_type == "to_do": + content += 
self.parse_rich_text(block["to_do"]["rich_text"]) + "\n" + elif block_type == "code": + content += self.parse_rich_text(block["code"]["rich_text"]) + "\n\n" + elif block_type == "image": + content += f"[Image: {block['image']['external']['url']}]\n\n" + elif block_type == "divider": + content += "---\n\n" + return content.strip() + + def parse_rich_text(self, rich_text: list) -> str: + text = "" + for segment in rich_text: + text += segment["plain_text"] + return text +``` + +## Example Usage + + + +Here's an example of how you can use the `NotionPageContent` component in a Langflow flow: + + + + +## Best Practices + +When using the `NotionPageContent` component, consider the following best practices: + +- Ensure that you have the necessary permissions to access the Notion page you want to retrieve. +- Keep your Notion integration token secure and avoid sharing it publicly. +- Be mindful of the content you retrieve and ensure that it aligns with your intended use case. + +The `NotionPageContent` component provides a seamless way to integrate Notion page content into your Langflow workflows. By leveraging this component, you can easily retrieve and process the content of Notion pages, enabling you to build powerful and dynamic applications. Explore the capabilities of the `NotionPageContent` component and unlock new possibilities in your Langflow projects! + +## Troubleshooting + +If you encounter any issues while using the `NotionPageContent` component, consider the following: + +- Double-check that you have provided the correct Notion page ID. +- Verify that your Notion integration token is valid and has the necessary permissions. +- Check the Notion API documentation for any updates or changes that may affect the component's functionality. 
diff --git a/docs/docs/integrations/notion/page-create.md b/docs/docs/integrations/notion/page-create.md new file mode 100644 index 000000000..f942f257b --- /dev/null +++ b/docs/docs/integrations/notion/page-create.md @@ -0,0 +1,131 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Page Create + +The `NotionPageCreator` component creates pages in a Notion database. It provides a convenient way to integrate Notion page creation into your Langflow workflows. + +[Notion Reference](https://developers.notion.com/reference/post-page) + + +The `NotionPageCreator` component enables you to: +- Create new pages in a specified Notion database +- Set custom properties for the created pages +- Retrieve the ID and URL of the newly created pages + + +## Component Usage + +To use the `NotionPageCreator` component in a Langflow flow, follow these steps: + +1. Add the `NotionPageCreator` component to your flow. +2. Configure the component by providing the required inputs: + - `database_id`: The ID of the Notion database where the pages will be created. + - `notion_secret`: The Notion integration token for authentication. + - `properties`: The properties of the new page, specified as a JSON string. +3. Connect the component to other components in your flow as needed. +4. Run the flow to create Notion pages based on the configured inputs. + +## Component Python Code + +```python +import json +from typing import Optional + +import requests +from langflow.custom import CustomComponent + + +class NotionPageCreator(CustomComponent): + display_name = "Create Page [Notion]" + description = "A component for creating Notion pages." 
+ documentation: str = "https://docs.langflow.org/integrations/notion/page-create" + icon = "NotionDirectoryLoader" + + def build_config(self): + return { + "database_id": { + "display_name": "Database ID", + "field_type": "str", + "info": "The ID of the Notion database.", + }, + "notion_secret": { + "display_name": "Notion Secret", + "field_type": "str", + "info": "The Notion integration token.", + "password": True, + }, + "properties": { + "display_name": "Properties", + "field_type": "str", + "info": "The properties of the new page. Depending on your database setup, this can change. E.G: {'Task name': {'id': 'title', 'type': 'title', 'title': [{'type': 'text', 'text': {'content': 'Send Notion Components to LF', 'link': null}}]}}", + }, + } + + def build( + self, + database_id: str, + notion_secret: str, + properties: str = '{"Task name": {"id": "title", "type": "title", "title": [{"type": "text", "text": {"content": "Send Notion Components to LF", "link": null}}]}}', + ) -> str: + if not database_id or not properties: + raise ValueError("Invalid input. Please provide 'database_id' and 'properties'.") + + headers = { + "Authorization": f"Bearer {notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + } + + data = { + "parent": {"database_id": database_id}, + "properties": json.loads(properties), + } + + response = requests.post("https://api.notion.com/v1/pages", headers=headers, json=data) + + if response.status_code == 200: + page_id = response.json()["id"] + self.status = f"Successfully created Notion page with ID: {page_id}\n {str(response.json())}" + return response.json() + else: + error_message = f"Failed to create Notion page. 
Status code: {response.status_code}, Error: {response.text}" + self.status = error_message + raise Exception(error_message) +``` + +## Example Usage + + +Here's an example of how to use the `NotionPageCreator` component in a Langflow flow: + + + + +## Best Practices + +When using the `NotionPageCreator` component, consider the following best practices: + +- Ensure that you have a valid Notion integration token with the necessary permissions to create pages in the specified database. +- Properly format the `properties` input as a JSON string, matching the structure and field types of your Notion database. +- Handle any errors or exceptions that may occur during the page creation process and provide appropriate error messages. +- To avoid the hassle of messing with JSON, we recommend using the LLM to create the JSON for you as input. + +The `NotionPageCreator` component simplifies the process of creating pages in a Notion database directly from your Langflow workflows. By leveraging this component, you can seamlessly integrate Notion page creation functionality into your automated processes, saving time and effort. Feel free to explore the capabilities of the `NotionPageCreator` component and adapt it to suit your specific requirements. + +## Troubleshooting + +If you encounter any issues while using the `NotionPageCreator` component, consider the following: + +- Double-check that the `database_id` and `notion_secret` inputs are correct and valid. +- Verify that the `properties` input is properly formatted as a JSON string and matches the structure of your Notion database. +- Check the Notion API documentation for any updates or changes that may affect the component's functionality. 
diff --git a/docs/docs/integrations/notion/page-update.md b/docs/docs/integrations/notion/page-update.md new file mode 100644 index 000000000..0370a2b3a --- /dev/null +++ b/docs/docs/integrations/notion/page-update.md @@ -0,0 +1,138 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Page Update + +The `NotionPageUpdate` component updates the properties of a Notion page. It provides a convenient way to integrate updating Notion page properties into your Langflow workflows. + +[Notion Reference](https://developers.notion.com/reference/patch-page) + +## Component Usage + +To use the `NotionPageUpdate` component in your Langflow flow: + +1. Drag and drop the `NotionPageUpdate` component onto the canvas. +2. Double-click the component to open its configuration. +3. Provide the required parameters as defined in the component's `build_config` method. +4. Connect the component to other nodes in your flow as needed. + +## Component Python Code + +```python +import json +import requests +from typing import Dict, Any + +from langflow import CustomComponent +from langflow.schema import Record + + +class NotionPageUpdate(CustomComponent): + display_name = "Update Page Property [Notion]" + description = "Update the properties of a Notion page." 
+ documentation: str = "https://docs.langflow.org/integrations/notion/page-update" + icon = "NotionDirectoryLoader" + + def build_config(self): + return { + "page_id": { + "display_name": "Page ID", + "field_type": "str", + "info": "The ID of the Notion page to update.", + }, + "properties": { + "display_name": "Properties", + "field_type": "str", + "info": "The properties to update on the page (as a JSON string).", + "multiline": True, + }, + "notion_secret": { + "display_name": "Notion Secret", + "field_type": "str", + "info": "The Notion integration token.", + "password": True, + }, + } + + def build( + self, + page_id: str, + properties: str, + notion_secret: str, + ) -> Record: + url = f"https://api.notion.com/v1/pages/{page_id}" + headers = { + "Authorization": f"Bearer {notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", # Use the latest supported version + } + + try: + parsed_properties = json.loads(properties) + except json.JSONDecodeError as e: + raise ValueError("Invalid JSON format for properties") from e + + data = { + "properties": parsed_properties + } + + response = requests.patch(url, headers=headers, json=data) + response.raise_for_status() + + updated_page = response.json() + + output = "Updated page properties:\n" + for prop_name, prop_value in updated_page["properties"].items(): + output += f"{prop_name}: {prop_value}\n" + + self.status = output + return Record(data=updated_page) +``` + +Let's break down the key parts of this component: + +- The `build_config` method defines the configuration fields for the component. It specifies the required parameters and their properties, such as display names, field types, and any additional information or validation. + +- The `build` method contains the main logic of the component. It takes the configured parameters as input and performs the necessary operations to update the properties of a Notion page. 
+ +- The component interacts with the Notion API to update the page properties. It constructs the API URL, headers, and request data based on the provided parameters. + +- The processed data is returned as a `Record` object, which can be connected to other components in the Langflow flow. The `Record` object contains the updated page data. + +- The component also stores the updated page properties in the `status` attribute for logging and debugging purposes. + +## Example Usage + + +Here's an example of how to use the `NotionPageUpdate` component in a Langflow flow: + + + + +## Best Practices + +When using the `NotionPageUpdate` component, consider the following best practices: + +- Ensure that you have a valid Notion integration token with the necessary permissions to update page properties. +- Handle edge cases and error scenarios gracefully, such as invalid JSON format for properties or API request failures. +- We recommend using an LLM to generate the inputs for this component, to allow flexibility + +By leveraging the `NotionPageUpdate` component in Langflow, you can easily integrate updating Notion page properties into your language model workflows and build powerful applications that extend Langflow's capabilities. + +## Troubleshooting + +If you encounter any issues while using the `NotionPageUpdate` component, consider the following: + +- Double-check that you have correctly configured the component with the required parameters, including the page ID, properties JSON, and Notion integration token. +- Verify that your Notion integration token has the necessary permissions to update page properties. +- Check the Langflow logs for any error messages or exceptions related to the component, such as invalid JSON format or API request failures. +- Consult the [Notion API Documentation](https://developers.notion.com/reference/patch-page) for specific troubleshooting steps or common issues related to updating page properties. 
diff --git a/docs/docs/integrations/notion/search.md b/docs/docs/integrations/notion/search.md new file mode 100644 index 000000000..a972bffc0 --- /dev/null +++ b/docs/docs/integrations/notion/search.md @@ -0,0 +1,184 @@ +import Admonition from "@theme/Admonition"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Notion Search + +The `NotionSearch` component is designed to search all pages and databases that have been shared with an integration in Notion. It provides a convenient way to integrate Notion search capabilities into your Langflow workflows. + +[Notion Reference](https://developers.notion.com/reference/search) + + + The `NotionSearch` component enables you to: + +- Search for pages and databases in Notion that have been shared with an integration +- Filter the search results based on object type (pages or databases) +- Sort the search results in ascending or descending order based on the last edited time + + + +## Component Usage + +To use the `NotionSearch` component in a Langflow flow, follow these steps: + +1. **Add the `NotionSearch` component to your flow.** +2. **Configure the component by providing the required parameters:** + - `notion_secret`: The Notion integration token for authentication. + - `query`: The text to search for in page and database titles. + - `filter_value`: The type of objects to include in the search results (pages or databases). + - `sort_direction`: The direction to sort the search results (ascending or descending). +3. 
**Connect the `NotionSearch` component to other components in your flow as needed.** + +## Component Python Code + +```python +import requests +from typing import Dict, Any, List +from langflow.custom import CustomComponent +from langflow.schema import Record + +class NotionSearch(CustomComponent): + display_name = "Search Notion" + description = ( + "Searches all pages and databases that have been shared with an integration." + ) + documentation: str = "https://docs.langflow.org/integrations/notion/search" + icon = "NotionDirectoryLoader" + + field_order = [ + "notion_secret", + "query", + "filter_value", + "sort_direction", + ] + + def build_config(self): + return { + "notion_secret": { + "display_name": "Notion Secret", + "field_type": "str", + "info": "The Notion integration token.", + "password": True, + }, + "query": { + "display_name": "Search Query", + "field_type": "str", + "info": "The text that the API compares page and database titles against.", + }, + "filter_value": { + "display_name": "Filter Type", + "field_type": "str", + "info": "Limits the results to either only pages or only databases.", + "options": ["page", "database"], + "default_value": "page", + }, + "sort_direction": { + "display_name": "Sort Direction", + "field_type": "str", + "info": "The direction to sort the results.", + "options": ["ascending", "descending"], + "default_value": "descending", + }, + } + + def build( + self, + notion_secret: str, + query: str = "", + filter_value: str = "page", + sort_direction: str = "descending", + ) -> List[Record]: + try: + url = "https://api.notion.com/v1/search" + headers = { + "Authorization": f"Bearer {notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + } + + data = { + "query": query, + "filter": { + "value": filter_value, + "property": "object" + }, + "sort":{ + "direction": sort_direction, + "timestamp": "last_edited_time" + } + } + + response = requests.post(url, headers=headers, json=data) + 
response.raise_for_status() + + results = response.json() + records = [] + combined_text = f"Results found: {len(results['results'])}\n\n" + for result in results['results']: + result_data = { + 'id': result['id'], + 'type': result['object'], + 'last_edited_time': result['last_edited_time'], + } + + if result['object'] == 'page': + result_data['title_or_url'] = result['url'] + text = f"id: {result['id']}\ntitle_or_url: {result['url']}\n" + elif result['object'] == 'database': + if 'title' in result and isinstance(result['title'], list) and len(result['title']) > 0: + result_data['title_or_url'] = result['title'][0]['plain_text'] + text = f"id: {result['id']}\ntitle_or_url: {result['title'][0]['plain_text']}\n" + else: + result_data['title_or_url'] = "N/A" + text = f"id: {result['id']}\ntitle_or_url: N/A\n" + + text += f"type: {result['object']}\nlast_edited_time: {result['last_edited_time']}\n\n" + combined_text += text + records.append(Record(text=text, data=result_data)) + + self.status = combined_text + return records + + except Exception as e: + self.status = f"An error occurred: {str(e)}" + return [Record(text=self.status, data=[])] +``` + +## Example Usage + + +Here's an example of how you can use the `NotionSearch` component in a Langflow flow: + + + +In this example, the `NotionSearch` component is used to search for pages and databases in Notion based on the provided query and filter criteria. The retrieved data can then be processed further in the subsequent components of the flow. + + +## Best Practices + +When using the `NotionSearch` component, consider these best practices: + +- Ensure you have a valid Notion integration token with the necessary permissions to search for pages and databases. +- Provide a meaningful search query to narrow down the results to the desired pages or databases. +- Choose the appropriate filter type (`page` or `database`) based on your search requirements. 
+- Consider the sorting direction (`ascending` or `descending`) to organize the search results effectively. + +The `NotionSearch` component provides a powerful way to integrate Notion search capabilities into your Langflow workflows. By leveraging this component, you can easily search for pages and databases in Notion based on custom queries and filters, enabling you to build more dynamic and data-driven flows. + +We encourage you to explore the capabilities of the `NotionSearch` component further and experiment with different search scenarios to unlock the full potential of integrating Notion search into your Langflow workflows. + +## Troubleshooting + +If you encounter any issues while using the `NotionSearch` component, consider the following: + +- Double-check that the `notion_secret` is correct and valid. +- Verify that the Notion integration has the necessary permissions to access the desired pages and databases. +- Check the Notion API documentation for any updates or changes that may affect the component's functionality. diff --git a/docs/docs/integrations/notion/setup.md b/docs/docs/integrations/notion/setup.md new file mode 100644 index 000000000..72bb8f3b4 --- /dev/null +++ b/docs/docs/integrations/notion/setup.md @@ -0,0 +1,78 @@ +import Admonition from "@theme/Admonition"; + +# Setting up a Notion App + +To use Notion components in Langflow, you first need to create a Notion integration and configure it with the necessary capabilities. This guide will walk you through the process of setting up a Notion integration and granting it access to your Notion databases. + +## Prerequisites + +- A Notion account with access to the workspace where you want to use the integration. +- Admin permissions in the Notion workspace to create and manage integrations. + +## Step 1: Create a Notion Integration + +1. Go to the [Notion Integrations](https://www.notion.com/my-integrations) page. +2. Click on the "New integration" button. +3. 
Give your integration a name and select the workspace where you want to use it. +4. Click "Submit" to create the integration. + + +When creating the integration, make sure to enable the necessary capabilities based on your requirements. Refer to the [Notion Integration Capabilities](https://developers.notion.com/reference/capabilities) documentation for more information on each capability. + + +## Step 2: Configure Integration Capabilities + +After creating the integration, you need to configure its capabilities to define what actions it can perform and what data it can access. + +1. In the integration settings page, go to the **Capabilities** tab. +2. Enable the required capabilities for your integration. For example: + - If your integration needs to read data from Notion, enable the "Read content" capability. + - If your integration needs to create new content in Notion, enable the "Insert content" capability. + - If your integration needs to update existing content in Notion, enable the "Update content" capability. +3. Configure the user information access level based on your integration's requirements. +4. Save the changes. + +## Step 3: Obtain Integration Token + +To authenticate your integration with Notion, you need to obtain an integration token. + +1. In the integration settings page, go to the "Secrets" tab. +2. Copy the "Internal Integration Token" value. This token will be used to authenticate your integration with Notion. + + +Your integration token is a sensitive piece of information. Make sure to keep it secure and never share it publicly. Store it safely in your Langflow configuration or environment variables. + + +## Step 4: Grant Integration Access to Notion Databases + +For your integration to interact with Notion databases, you need to grant it access to the specific databases it will be working with. + +1. Open the Notion database that you want your integration to access. +2. Click on the "Share" button in the top-right corner of the page. +3. 
In the "Invite" section, select your integration from the list.
+4. Click "Invite" to grant the integration access to the database.
+
+
+If your database contains references to other databases, you need to grant the integration access to those referenced databases as well. Repeat steps 1-4 for each referenced database to ensure your integration has the necessary access.
+
+
+## Using Notion Components in Langflow
+
+Once you have set up your Notion integration and granted it access to the required databases, you can start using the Notion components in Langflow.
+
+Langflow provides the following Notion components:
+
+- **List Pages**: Retrieves a list of pages from a Notion database.
+- **List Database Properties**: Retrieves the properties of a Notion database.
+- **Add Page Content**: Adds content to a Notion page.
+- **List Users**: Retrieves a list of users with access to a Notion workspace.
+- **Update Property**: Updates the value of a property in a Notion page.
+
+Refer to the individual component documentation for more details on how to use each component in your Langflow flows.
+
+## Additional Resources
+
+- [Notion API Documentation](https://developers.notion.com/docs/getting-started)
+- [Notion Integration Capabilities](https://developers.notion.com/reference/capabilities)
+
+If you encounter any issues or have questions, please reach out to our support team or consult the Langflow community forums.
diff --git a/docs/docs/migration/global-variables.mdx b/docs/docs/migration/global-variables.mdx index 3430ef405..616fa3621 100644 --- a/docs/docs/migration/global-variables.mdx +++ b/docs/docs/migration/global-variables.mdx @@ -105,6 +105,8 @@ The default list at the moment is: - PINECONE_API_KEY - SEARCHAPI_API_KEY - SERPAPI_API_KEY +- UPSTASH_VECTOR_REST_URL +- UPSTASH_VECTOR_REST_TOKEN - VECTARA_CUSTOMER_ID - VECTARA_CORPUS_ID - VECTARA_API_KEY diff --git a/docs/docs/migration/possible-installation-issues.mdx b/docs/docs/migration/possible-installation-issues.mdx index 00b6f2cd2..a012a1c09 100644 --- a/docs/docs/migration/possible-installation-issues.mdx +++ b/docs/docs/migration/possible-installation-issues.mdx @@ -1,4 +1,4 @@ -# Common Installation Issues +# ❗️ Common Installation Issues This is a list of possible issues that you may encounter when installing Langflow 1.0 alpha and how to solve them. @@ -25,11 +25,11 @@ ModuleNotFoundError: No module named 'langflow.__main__' There are two possible reasons for this error: 1. You've installed Langflow using _`pip install langflow`_ but you already had a previous version of Langflow installed in your system. - In this case, you might be running the wrong executable. - To solve this issue, run the correct executable by running _`python -m langflow run`_ instead of _`langflow run`_. - If that doesn't work, try uninstalling and reinstalling Langflow with _`python -m pip install langflow --pre -U`_. + In this case, you might be running the wrong executable. + To solve this issue, run the correct executable by running _`python -m langflow run`_ instead of _`langflow run`_. + If that doesn't work, try uninstalling and reinstalling Langflow with _`python -m pip install langflow --pre -U`_. 2. Some version conflicts might have occurred during the installation process. - Run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall Langflow and its dependencies. 
+ Run _`python -m pip install langflow --pre -U --force-reinstall`_ to reinstall Langflow and its dependencies. ## _`Something went wrong running migrations. Please, run 'langflow migration --fix'`_ @@ -45,4 +45,3 @@ There are two possible reasons for this error: This error can occur during Langflow upgrades when the new version can't override `langflow-pre.db` in `.cache/langflow/`. Clearing the cache removes this file but will also erase your settings. If you wish to retain your files, back them up before clearing the folder. - diff --git a/docs/docs/starter-projects/basic-prompting.mdx b/docs/docs/starter-projects/basic-prompting.mdx index ef333643f..26b054bcc 100644 --- a/docs/docs/starter-projects/basic-prompting.mdx +++ b/docs/docs/starter-projects/basic-prompting.mdx @@ -4,7 +4,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; import Admonition from "@theme/Admonition"; -# Basic prompting +# Basic Prompting Prompts serve as the inputs to a large language model (LLM), acting as the interface between human instructions and computational tasks. @@ -14,36 +14,17 @@ This article demonstrates how to use Langflow's prompt tools to issue basic prom ## Prerequisites -1. Install Langflow. -```bash -python -m pip install langflow --pre -``` +- [Langflow installed and running](../getting-started/install-langflow.mdx) -2. Start a local Langflow instance with the Langflow CLI: -```bash -langflow run -``` -Or start Langflow with Python: -```bash -python -m langflow run -``` - -Result: -``` -│ Welcome to ⛓ Langflow │ -│ │ -│ Access http://127.0.0.1:7860 │ -│ Collaborate, and contribute at our GitHub Repo 🚀 │ -``` +- [OpenAI API key created](https://platform.openai.com) - - Langflow v1.0 alpha is also available in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). Try it out or follow the instructions [here](/getting-started/huggingface-spaces) to install it locally. 
- + Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space + using this + link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) + to create your own Langflow workspace in minutes. -3. Create an [OpenAI API key](https://platform.openai.com). - ## Create the basic prompting project 1. From the Langflow dashboard, click **New Project**. @@ -64,25 +45,21 @@ Examine the **Prompt** component. The **Template** field instructs the LLM to `A This should be interesting... 4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**. - 1. In the **Variable Name** field, enter `openai_api_key`. - 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). - 3. Click **Save Variable**. + 1. In the **Variable Name** field, enter `openai_api_key`. + 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). + 3. Click **Save Variable**. ## Run the basic prompting flow 1. Click the **Run** button. -The **Interaction Panel** opens, where you can converse with your bot. + The **Interaction Panel** opens, where you can converse with your bot. 2. Type a message and press Enter. -The bot responds in a markedly piratical manner! + The bot responds in a markedly piratical manner! ## Modify the prompt for a different result 1. To modify your prompt results, in the **Prompt** template, click the **Template** field. -The **Edit Prompt** window opens. + The **Edit Prompt** window opens. 2. Change `Answer the user as if you were a pirate` to a different character, perhaps `Answer the user as if you were Harold Abelson.` 3. Run the basic prompting flow again. -The response will be markedly different. - - - - + The response will be markedly different. 
diff --git a/docs/docs/starter-projects/blog-writer.mdx b/docs/docs/starter-projects/blog-writer.mdx index 411233c8b..9380bf114 100644 --- a/docs/docs/starter-projects/blog-writer.mdx +++ b/docs/docs/starter-projects/blog-writer.mdx @@ -4,42 +4,23 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; import Admonition from "@theme/Admonition"; -# Blog writer +# Blog Writer Build a blog writer with OpenAI that uses URLs for reference content. ## Prerequisites -1. Install Langflow. -```bash -python -m pip install langflow --pre -``` +- [Langflow installed and running](../getting-started/install-langflow.mdx) -2. Start a local Langflow instance with the Langflow CLI: -```bash -langflow run -``` -Or start Langflow with Python: -```bash -python -m langflow run -``` - -Result: -```bash -│ Welcome to ⛓ Langflow │ -│ │ -│ Access http://127.0.0.1:7860 │ -│ Collaborate, and contribute at our GitHub Repo 🚀 │ -``` +- [OpenAI API key created](https://platform.openai.com) - - Langflow v1.0 alpha is also available in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). Try it out or follow the instructions [here](/getting-started/huggingface-spaces) to install it locally. - + Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space + using this + link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) + to create your own Langflow workspace in minutes. -3. Create an [OpenAI API key](https://platform.openai.com). - ## Create the Blog Writer project 1. From the Langflow dashboard, click **New Project**. @@ -58,6 +39,7 @@ Result: This flow creates a one-shot prompt flow with **Prompt**, **OpenAI**, and **Chat Output** components, and augments the flow with reference content and instructions from the **URL** and **Instructions** components. 
The **Prompt** component's default **Template** field looks like this: + ```bash Reference 1: @@ -81,16 +63,16 @@ The `{instructions}` value is received from the **Value** field of the **Instruc The `reference_1` and `reference_2` values are received from the **URL** fields of the **URL** components. 4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**. - 1. In the **Variable Name** field, enter `openai_api_key`. - 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). - 3. Click **Save Variable**. + 1. In the **Variable Name** field, enter `openai_api_key`. + 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). + 3. Click **Save Variable**. ## Run the Blog Writer flow 1. Click the **Run** button. -The **Interaction Panel** opens, where you can run your one-shot flow. + The **Interaction Panel** opens, where you can run your one-shot flow. 2. Click the **Lighting Bolt** icon to run your flow. 3. The **OpenAI** component constructs a blog post with the **URL** items as context. -The default **URL** values are for web pages at `promptingguide.ai`, so your blog post will be about prompting LLMs. + The default **URL** values are for web pages at `promptingguide.ai`, so your blog post will be about prompting LLMs. -To write about something different, change the values in the **URL** components, and see what the LLM constructs. \ No newline at end of file +To write about something different, change the values in the **URL** components, and see what the LLM constructs. diff --git a/docs/docs/starter-projects/document-qa.mdx b/docs/docs/starter-projects/document-qa.mdx index 1807bb948..ddbcd901a 100644 --- a/docs/docs/starter-projects/document-qa.mdx +++ b/docs/docs/starter-projects/document-qa.mdx @@ -10,36 +10,17 @@ Build a question-and-answer chatbot with a document loaded from local memory. ## Prerequisites -1. Install Langflow. 
-```bash -python -m pip install langflow --pre -``` +- [Langflow installed and running](../getting-started/install-langflow.mdx) -2. Start a local Langflow instance with the Langflow CLI: -```bash -langflow run -``` -Or start Langflow with Python: -```bash -python -m langflow run -``` - -Result: -``` -│ Welcome to ⛓ Langflow │ -│ │ -│ Access http://127.0.0.1:7860 │ -│ Collaborate, and contribute at our GitHub Repo 🚀 │ -``` +- [OpenAI API key created](https://platform.openai.com) - - Langflow v1.0 alpha is also available in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). Try it out or follow the instructions [here](/getting-started/huggingface-spaces) to install it locally. - + Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space + using this + link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) + to create your own Langflow workspace in minutes. -3. Create an [OpenAI API key](https://platform.openai.com). - ## Create the Document QA project 1. From the Langflow dashboard, click **New Project**. @@ -61,24 +42,27 @@ The **Prompt** component is instructed to answer questions based on the contents Including a file with the prompt gives the **OpenAI** component context it may not otherwise have access to. 4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**. - 1. In the **Variable Name** field, enter `openai_api_key`. - 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). - 3. Click **Save Variable**. + + 1. In the **Variable Name** field, enter `openai_api_key`. + 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). + 3. Click **Save Variable**. 5. To select a document to load, in the **Files** component, click within the **Path** field. - 1. Select a local file, and then click **Open**. - 2. The file name appears in the field. 
- - The file must be of an extension type listed [here](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/base/data/utils.py#L13). - + 1. Select a local file, and then click **Open**. + 2. The file name appears in the field. + + The file must be of an extension type listed + [here](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/base/data/utils.py#L13). + ## Run the Document QA flow 1. Click the **Run** button. -The **Interaction Panel** opens, where you can converse with your bot. + The **Interaction Panel** opens, where you can converse with your bot. 2. Type a message and press Enter. -For this example, we loaded an error log `.txt` file and asked, "What went wrong?" -The bot responded: + For this example, we loaded an error log `.txt` file and asked, "What went wrong?" + The bot responded: + ``` The issue occurred during the execution of migrations in the application. Specifically, an error was raised by the Alembic library, indicating that new upgrade operations were detected that had not been accounted for in the existing migration scripts. The operation in question involved modifying the nullable property of a column (apikey, created_at) in the database, with details about the existing type (DATETIME()), existing server default, and other properties. ``` diff --git a/docs/docs/starter-projects/memory-chatbot.mdx b/docs/docs/starter-projects/memory-chatbot.mdx index e9560d9e5..8e38ca3e0 100644 --- a/docs/docs/starter-projects/memory-chatbot.mdx +++ b/docs/docs/starter-projects/memory-chatbot.mdx @@ -4,42 +4,23 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; import Admonition from "@theme/Admonition"; -# Memory chatbot +# Memory Chatbot This flow extends the [basic prompting flow](./basic-prompting.mdx) to include chat memory for unique SessionIDs. ## Prerequisites -1. Install Langflow. 
-```bash -python -m pip install langflow --pre -``` +- [Langflow installed and running](../getting-started/install-langflow.mdx) -2. Start a local Langflow instance with the Langflow CLI: -```bash -langflow run -``` -Or start Langflow with Python: -```bash -python -m langflow run -``` - -Result: -``` -│ Welcome to ⛓ Langflow │ -│ │ -│ Access http://127.0.0.1:7860 │ -│ Collaborate, and contribute at our GitHub Repo 🚀 │ -``` +- [OpenAI API key created](https://platform.openai.com) - - Langflow v1.0 alpha is also available in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). Try it out or follow the instructions [here](/getting-started/huggingface-spaces) to install it locally. - + Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space + using this + link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) + to create your own Langflow workspace in minutes. -3. Create an [OpenAI API key](https://platform.openai.com). - ## Create the memory chatbot project 1. From the Langflow dashboard, click **New Project**. @@ -65,16 +46,16 @@ This chatbot is augmented with the **Chat Memory** component, which stores messa The **Chat History** component gives the **OpenAI** component a memory of previous questions. 4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**. - 1. In the **Variable Name** field, enter `openai_api_key`. - 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). - 3. Click **Save Variable**. + 1. In the **Variable Name** field, enter `openai_api_key`. + 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). + 3. Click **Save Variable**. ## Run the memory chatbot flow 1. Click the **Run** button. -The **Interaction Panel** opens, where you can converse with your bot. + The **Interaction Panel** opens, where you can converse with your bot. 2. 
Type a message and press Enter. -The bot will respond according to the template in the **Prompt** component. + The bot will respond according to the template in the **Prompt** component. 3. Type more questions. In the **Outputs** log, your queries are logged in order. Up to 5 queries are stored by default. Try asking `What is the first subject I asked you about?` to see where the LLM's memory disappears. ## Modify the Session ID field to have multiple conversations @@ -87,11 +68,11 @@ You can demonstrate this by modifying the **Session ID** value to switch between 1. In the **Session ID** field of the **Chat Memory** and **Chat Input** components, change the **Session ID** value from `MySessionID` to `AnotherSessionID`. 2. Click the **Run** button to run your flow. -In the **Interaction Panel**, you will have a new conversation. (You may need to clear the cache with the **Eraser** button). + In the **Interaction Panel**, you will have a new conversation. (You may need to clear the cache with the **Eraser** button). 3. Type a few questions to your bot. 4. In the **Session ID** field of the **Chat Memory** and **Chat Input** components, change the **Session ID** value back to `MySessionID`. 5. Run your flow. -The **Outputs** log of the **Interaction Panel** displays the history from your initial chat with `MySessionID`. + The **Outputs** log of the **Interaction Panel** displays the history from your initial chat with `MySessionID`. ## Store Session ID as a Langflow variable @@ -101,4 +82,3 @@ To store **Session ID** as a Langflow variable, in the **Session ID** field, cli 2. In the **Value** field, enter a value like `1B5EBD79-6E9C-4533-B2C8-7E4FF29E983B`. 3. Click **Save Variable**. 4. Apply this variable to **Chat Input**. 
- diff --git a/docs/docs/starter-projects/vector-store-rag.mdx b/docs/docs/starter-projects/vector-store-rag.mdx index fda7e432b..d0054e6c4 100644 --- a/docs/docs/starter-projects/vector-store-rag.mdx +++ b/docs/docs/starter-projects/vector-store-rag.mdx @@ -4,7 +4,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; import Admonition from "@theme/Admonition"; -# Vector store RAG +# Vector Store RAG Retrieval Augmented Generation, or RAG, is a pattern for training LLMs on your data and querying it. @@ -17,16 +17,19 @@ We've chosen [Astra DB](https://astra.datastax.com/signup?utm_source=langflow-pr ## Prerequisites - Langflow v1.0 alpha is also available in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true). Try it out or follow the instructions [here](../getting-started/huggingface-spaces) to install it locally. + Langflow v1.0 alpha is also available in HuggingFace Spaces. [Clone the space + using this + link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) + to create your own Langflow workspace in minutes. 
-* [Langflow installed](../getting-started/install-langflow.mdx) +- [Langflow installed and running](../getting-started/install-langflow.mdx) -* [OpenAI API key](https://platform.openai.com) +- [OpenAI API key](https://platform.openai.com) -* [An Astra DB vector database created](https://docs.datastax.com/en/astra-db-serverless/get-started/quickstart.html) with: - * Application token (`AstraCS:WSnyFUhRxsrg…​`) - * API endpoint (`https://ASTRA_DB_ID-ASTRA_DB_REGION.apps.astra.datastax.com`) +- [An Astra DB vector database created](https://docs.datastax.com/en/astra-db-serverless/get-started/quickstart.html) with: + - Application token (`AstraCS:WSnyFUhRxsrg…​`) + - API endpoint (`https://ASTRA_DB_ID-ASTRA_DB_REGION.apps.astra.datastax.com`) ## Create the vector store RAG project @@ -49,38 +52,40 @@ The **ingestion** flow (bottom of the screen) populates the vector store with da It ingests data from a file (**File**), splits it into chunks (**Recursive Character Text Splitter**), indexes it in Astra DB (**Astra DB**), and computes embeddings for the chunks (**OpenAI Embeddings**). This forms a "brain" for the query flow. -The **query** flow (top of the screen) allows users to chat with the embedded vector store data. It's a little more complex: +The **query** flow (top of the screen) allows users to chat with the embedded vector store data. It's a little more complex: -* **Chat Input** component defines where to put the user input coming from the Playground. -* **OpenAI Embeddings** component generates embeddings from the user input. -* **Astra DB Search** component retrieves the most relevant Records from the Astra DB database. -* **Text Output** component turns the Records into Text by concatenating them and also displays it in the Playground. -* **Prompt** component takes in the user input and the retrieved Records as text and builds a prompt for the OpenAI model. -* **OpenAI** component generates a response to the prompt. 
-* **Chat Output** component displays the response in the Playground. +- **Chat Input** component defines where to put the user input coming from the Playground. +- **OpenAI Embeddings** component generates embeddings from the user input. +- **Astra DB Search** component retrieves the most relevant Records from the Astra DB database. +- **Text Output** component turns the Records into Text by concatenating them and also displays it in the Playground. +- **Prompt** component takes in the user input and the retrieved Records as text and builds a prompt for the OpenAI model. +- **OpenAI** component generates a response to the prompt. +- **Chat Output** component displays the response in the Playground. 4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**. - 1. In the **Variable Name** field, enter `openai_api_key`. - 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). - 3. Click **Save Variable**. -4. To create environment variables for the **Astra DB** and **Astra DB Search** components: - 1. In the **Token** field, click the **Globe** button, and then click **Add New Variable**. - 2. In the **Variable Name** field, enter `astra_token`. - 3. In the **Value** field, paste your Astra application token (`AstraCS:WSnyFUhRxsrg…​`). - 4. Click **Save Variable**. - 5. Repeat the above steps for the **API Endpoint** field, pasting your Astra API Endpoint instead (`https://ASTRA_DB_ID-ASTRA_DB_REGION.apps.astra.datastax.com`). - 6. Add the global variable to both the **Astra DB** and **Astra DB Search** components. + 1. In the **Variable Name** field, enter `openai_api_key`. + 2. In the **Value** field, paste your OpenAI API Key (`sk-...`). + 3. Click **Save Variable**. + +5. To create environment variables for the **Astra DB** and **Astra DB Search** components: + 1. 
In the **Token** field, click the **Globe** button, and then click **Add New Variable**. + 2. In the **Variable Name** field, enter `astra_token`. + 3. In the **Value** field, paste your Astra application token (`AstraCS:WSnyFUhRxsrg…​`). + 4. Click **Save Variable**. + 5. Repeat the above steps for the **API Endpoint** field, pasting your Astra API Endpoint instead (`https://ASTRA_DB_ID-ASTRA_DB_REGION.apps.astra.datastax.com`). + 6. Add the global variable to both the **Astra DB** and **Astra DB Search** components. ## Run the vector store RAG flow 1. Click the **Playground** button. -The **Playground** opens, where you can chat with your data. + The **Playground** opens, where you can chat with your data. 2. Type a message and press Enter. (Try something like "What topics do you know about?") 3. The bot will respond with a summary of the data you've embedded. For example, we embedded a PDF of an engine maintenance manual and asked, "How do I change the oil?" The bot responds: + ``` To change the oil in the engine, follow these steps: @@ -102,7 +107,3 @@ You should use a 3/8 inch wrench to remove the oil drain cap. ``` This is the size the engine manual lists as well. This confirms our flow works, because the query returns the unique knowledge we embedded from the Astra vector store. 
- - - - diff --git a/docs/docs/tutorials/chatprompttemplate_guide.mdx b/docs/docs/tutorials/chatprompttemplate_guide.mdx index 05a8f3333..48059b134 100644 --- a/docs/docs/tutorials/chatprompttemplate_guide.mdx +++ b/docs/docs/tutorials/chatprompttemplate_guide.mdx @@ -3,7 +3,7 @@ import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; -# Building chatbots with System Message +# Building Chatbots with System Message ## Overview diff --git a/docs/docs/tutorials/loading_document.mdx b/docs/docs/tutorials/loading_document.mdx index 73fb85968..4a6143a0e 100644 --- a/docs/docs/tutorials/loading_document.mdx +++ b/docs/docs/tutorials/loading_document.mdx @@ -3,7 +3,7 @@ import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; -# Integrating documents with prompt variables +# Integrating Documents with Prompt Variables ## Overview diff --git a/docs/sidebars.js b/docs/sidebars.js index 81b255a66..d3f4f2671 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -14,7 +14,8 @@ module.exports = { "index", "getting-started/install-langflow", "getting-started/quickstart", - "getting-started/huggingface-spaces", + "getting-started/canvas", + "migration/possible-installation-issues", "getting-started/new-to-llms", ], }, @@ -35,15 +36,12 @@ module.exports = { label: "Administration", collapsed: false, items: [ - "administration/login", "administration/api", + "administration/login", "administration/cli", "administration/playground", "administration/global-env", - "administration/components", - "administration/collection", - "administration/prompt-customization", - "administration/langfuse_integration", + "administration/chat-widget", ], }, { @@ -82,13 +80,13 @@ module.exports = { label: "Example Components", collapsed: true, items: [ - "examples/flow-runner", - "examples/conversation-chain", - 
"examples/buffer-memory", - "examples/csv-loader", - "examples/searchapi-tool", - "examples/serp-api-tool", - "examples/python-function", + "examples/chat-memory", + "examples/combine-text", + "examples/create-record", + "examples/pass", + "examples/store-message", + "examples/sub-flow", + "examples/text-operator", ], }, { @@ -131,5 +129,28 @@ module.exports = { "contributing/contribute-component", ], }, + { + type: "category", + label: "Integrations", + collapsed: false, + items: [ + { + type: "category", + label: "Notion", + items: [ + "integrations/notion/intro", + "integrations/notion/setup", + "integrations/notion/search", + "integrations/notion/list-database-properties", + "integrations/notion/list-pages", + "integrations/notion/list-users", + "integrations/notion/page-create", + "integrations/notion/add-content-to-page", + "integrations/notion/page-update", + "integrations/notion/page-content-viewer", + ], + }, + ], + }, ], }; diff --git a/docs/static/data/AstraDB-RAG-Flows.json b/docs/static/data/AstraDB-RAG-Flows.json index d38364b4a..d8bd23eb2 100644 --- a/docs/static/data/AstraDB-RAG-Flows.json +++ b/docs/static/data/AstraDB-RAG-Flows.json @@ -532,10 +532,10 @@ "advanced": false, "dynamic": false, "info": "", - "load_from_db": false, + "load_from_db": true, "title_case": false, "input_types": ["Text"], - "value": "" + "value": "OPENAI_API_KEY" }, "openai_api_type": { "type": "str", diff --git a/docs/static/img/api-window.png b/docs/static/img/api-window.png new file mode 100644 index 000000000..47790433f Binary files /dev/null and b/docs/static/img/api-window.png differ diff --git a/docs/static/img/chat-input-with-menu.png b/docs/static/img/chat-input-with-menu.png new file mode 100644 index 000000000..df48a3643 Binary files /dev/null and b/docs/static/img/chat-input-with-menu.png differ diff --git a/docs/static/img/features.png b/docs/static/img/features.png deleted file mode 100644 index 0e55c5a04..000000000 Binary files a/docs/static/img/features.png 
and /dev/null differ diff --git a/docs/static/img/langflow_basic_howto.gif b/docs/static/img/langflow_basic_howto.gif new file mode 100644 index 000000000..023a294e0 Binary files /dev/null and b/docs/static/img/langflow_basic_howto.gif differ diff --git a/docs/static/img/notion/AddContentToPage_flow_example.png b/docs/static/img/notion/AddContentToPage_flow_example.png new file mode 100644 index 000000000..31aadb080 Binary files /dev/null and b/docs/static/img/notion/AddContentToPage_flow_example.png differ diff --git a/docs/static/img/notion/AddContentToPage_flow_example_dark.png b/docs/static/img/notion/AddContentToPage_flow_example_dark.png new file mode 100644 index 000000000..0d8809209 Binary files /dev/null and b/docs/static/img/notion/AddContentToPage_flow_example_dark.png differ diff --git a/docs/static/img/notion/NotionDatabaseProperties_flow_example.png b/docs/static/img/notion/NotionDatabaseProperties_flow_example.png new file mode 100644 index 000000000..6ec3d7ac1 Binary files /dev/null and b/docs/static/img/notion/NotionDatabaseProperties_flow_example.png differ diff --git a/docs/static/img/notion/NotionDatabaseProperties_flow_example_dark.png b/docs/static/img/notion/NotionDatabaseProperties_flow_example_dark.png new file mode 100644 index 000000000..4d3a5a2c9 Binary files /dev/null and b/docs/static/img/notion/NotionDatabaseProperties_flow_example_dark.png differ diff --git a/docs/static/img/notion/NotionListPages_flow_example.png b/docs/static/img/notion/NotionListPages_flow_example.png new file mode 100644 index 000000000..c04e3d857 Binary files /dev/null and b/docs/static/img/notion/NotionListPages_flow_example.png differ diff --git a/docs/static/img/notion/NotionListPages_flow_example_dark.png b/docs/static/img/notion/NotionListPages_flow_example_dark.png new file mode 100644 index 000000000..ee041bde7 Binary files /dev/null and b/docs/static/img/notion/NotionListPages_flow_example_dark.png differ diff --git 
a/docs/static/img/notion/NotionPageContent_flow_example.png b/docs/static/img/notion/NotionPageContent_flow_example.png new file mode 100644 index 000000000..5d89af125 Binary files /dev/null and b/docs/static/img/notion/NotionPageContent_flow_example.png differ diff --git a/docs/static/img/notion/NotionPageContent_flow_example_dark.png b/docs/static/img/notion/NotionPageContent_flow_example_dark.png new file mode 100644 index 000000000..144c1e0ed Binary files /dev/null and b/docs/static/img/notion/NotionPageContent_flow_example_dark.png differ diff --git a/docs/static/img/notion/NotionPageCreator_flow_example.png b/docs/static/img/notion/NotionPageCreator_flow_example.png new file mode 100644 index 000000000..1cc14788a Binary files /dev/null and b/docs/static/img/notion/NotionPageCreator_flow_example.png differ diff --git a/docs/static/img/notion/NotionPageCreator_flow_example_dark.png b/docs/static/img/notion/NotionPageCreator_flow_example_dark.png new file mode 100644 index 000000000..97788dbc0 Binary files /dev/null and b/docs/static/img/notion/NotionPageCreator_flow_example_dark.png differ diff --git a/docs/static/img/notion/NotionPageUpdate_flow_example.png b/docs/static/img/notion/NotionPageUpdate_flow_example.png new file mode 100644 index 000000000..dd02f9bba Binary files /dev/null and b/docs/static/img/notion/NotionPageUpdate_flow_example.png differ diff --git a/docs/static/img/notion/NotionPageUpdate_flow_example_dark.png b/docs/static/img/notion/NotionPageUpdate_flow_example_dark.png new file mode 100644 index 000000000..bc37ff236 Binary files /dev/null and b/docs/static/img/notion/NotionPageUpdate_flow_example_dark.png differ diff --git a/docs/static/img/notion/NotionSearch_flow_example.png b/docs/static/img/notion/NotionSearch_flow_example.png new file mode 100644 index 000000000..95e6c72a7 Binary files /dev/null and b/docs/static/img/notion/NotionSearch_flow_example.png differ diff --git a/docs/static/img/notion/NotionSearch_flow_example_dark.png 
b/docs/static/img/notion/NotionSearch_flow_example_dark.png new file mode 100644 index 000000000..924aff55b Binary files /dev/null and b/docs/static/img/notion/NotionSearch_flow_example_dark.png differ diff --git a/docs/static/img/notion/NotionUserList_flow_example.png b/docs/static/img/notion/NotionUserList_flow_example.png new file mode 100644 index 000000000..e0fbd8579 Binary files /dev/null and b/docs/static/img/notion/NotionUserList_flow_example.png differ diff --git a/docs/static/img/notion/NotionUserList_flow_example_dark.png b/docs/static/img/notion/NotionUserList_flow_example_dark.png new file mode 100644 index 000000000..d59e7d9a8 Binary files /dev/null and b/docs/static/img/notion/NotionUserList_flow_example_dark.png differ diff --git a/docs/static/img/notion/notion_bundle.jpg b/docs/static/img/notion/notion_bundle.jpg new file mode 100644 index 000000000..b6dc62da7 Binary files /dev/null and b/docs/static/img/notion/notion_bundle.jpg differ diff --git a/docs/static/img/notion/notion_components_bundle.png b/docs/static/img/notion/notion_components_bundle.png new file mode 100644 index 000000000..f924ed14a Binary files /dev/null and b/docs/static/img/notion/notion_components_bundle.png differ diff --git a/docs/static/img/notion/notion_components_bundle_dark.png b/docs/static/img/notion/notion_components_bundle_dark.png new file mode 100644 index 000000000..048646bdb Binary files /dev/null and b/docs/static/img/notion/notion_components_bundle_dark.png differ diff --git a/docs/static/img/project-options-menu.png b/docs/static/img/project-options-menu.png new file mode 100644 index 000000000..ab687c9ac Binary files /dev/null and b/docs/static/img/project-options-menu.png differ diff --git a/docs/static/img/single-component.png b/docs/static/img/single-component.png new file mode 100644 index 000000000..93237f3c9 Binary files /dev/null and b/docs/static/img/single-component.png differ diff --git a/docs/static/json_files/Notion_Components_bundle.json 
b/docs/static/json_files/Notion_Components_bundle.json new file mode 100644 index 000000000..5e632ad9c --- /dev/null +++ b/docs/static/json_files/Notion_Components_bundle.json @@ -0,0 +1,881 @@ +{ + "id": "7cd51434-9767-450f-8742-27857367f8c2", + "data": { + "nodes": [ + { + "id": "RecordsToText-Q69g5", + "type": "genericNode", + "position": { "x": -2671.5528488127866, "y": -963.4266471378126 }, + "data": { + "type": "RecordsToText", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nfrom typing import List\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionUserList(CustomComponent):\r\n display_name = \"List Users [Notion]\"\r\n description = \"Retrieve users from Notion.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-users\"\r\n icon = \"NotionDirectoryLoader\"\r\n \r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n ) -> List[Record]:\r\n url = \"https://api.notion.com/v1/users\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n response = requests.get(url, headers=headers)\r\n response.raise_for_status()\r\n\r\n data = response.json()\r\n results = data['results']\r\n\r\n records = []\r\n for user in results:\r\n id = user['id']\r\n type = user['type']\r\n name = user.get('name', '')\r\n avatar_url = user.get('avatar_url', '')\r\n\r\n record_data = {\r\n \"id\": id,\r\n \"type\": type,\r\n \"name\": name,\r\n \"avatar_url\": avatar_url,\r\n }\r\n\r\n output = \"User:\\n\"\r\n for key, value in record_data.items():\r\n output += f\"{key.replace('_', ' 
').title()}: {value}\\n\"\r\n output += \"________________________\\n\"\r\n\r\n record = Record(text=output, data=record_data)\r\n records.append(record)\r\n\r\n self.status = \"\\n\".join(record.text for record in records)\r\n return records", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Retrieve users from Notion.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "List Users [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/list-users", + "custom_fields": { "notion_secret": null }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "RecordsToText-Q69g5", + "description": "Retrieve users from Notion.", + "display_name": "List Users [Notion] " + }, + "selected": false, + "width": 384, + "height": 289, + "dragging": false, + "positionAbsolute": { + "x": -2671.5528488127866, + "y": -963.4266471378126 + } + }, + { + "id": "CustomComponent-PU0K5", + "type": "genericNode", + "position": { "x": -3077.2269116193215, "y": -960.9450220159636 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import json\r\nfrom typing import Optional\r\n\r\nimport requests\r\nfrom 
langflow.custom import CustomComponent\r\n\r\n\r\nclass NotionPageCreator(CustomComponent):\r\n display_name = \"Create Page [Notion]\"\r\n description = \"A component for creating Notion pages.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-create\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion database.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n \"properties\": {\r\n \"display_name\": \"Properties\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The properties of the new page. Depending on your database setup, this can change. E.G: {'Task name': {'id': 'title', 'type': 'title', 'title': [{'type': 'text', 'text': {'content': 'Send Notion Components to LF', 'link': null}}]}}\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n database_id: str,\r\n notion_secret: str,\r\n properties: str = '{\"Task name\": {\"id\": \"title\", \"type\": \"title\", \"title\": [{\"type\": \"text\", \"text\": {\"content\": \"Send Notion Components to LF\", \"link\": null}}]}}',\r\n ) -> str:\r\n if not database_id or not properties:\r\n raise ValueError(\"Invalid input. 
Please provide 'database_id' and 'properties'.\")\r\n\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"parent\": {\"database_id\": database_id},\r\n \"properties\": json.loads(properties),\r\n }\r\n\r\n response = requests.post(\"https://api.notion.com/v1/pages\", headers=headers, json=data)\r\n\r\n if response.status_code == 200:\r\n page_id = response.json()[\"id\"]\r\n self.status = f\"Successfully created Notion page with ID: {page_id}\\n {str(response.json())}\"\r\n return response.json()\r\n else:\r\n error_message = f\"Failed to create Notion page. Status code: {response.status_code}, Error: {response.text}\"\r\n self.status = error_message\r\n raise Exception(error_message)", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "database_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "database_id", + "display_name": "Database ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion database.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "properties": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 
"{\"Task name\": {\"id\": \"title\", \"type\": \"title\", \"title\": [{\"type\": \"text\", \"text\": {\"content\": \"Send Notion Components to LF\", \"link\": null}}]}}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "properties", + "display_name": "Properties", + "advanced": false, + "dynamic": false, + "info": "The properties of the new page. Depending on your database setup, this can change. E.G: {'Task name': {'id': 'title', 'type': 'title', 'title': [{'type': 'text', 'text': {'content': 'Send Notion Components to LF', 'link': null}}]}}", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "A component for creating Notion pages.", + "icon": "NotionDirectoryLoader", + "base_classes": ["object", "str", "Text"], + "display_name": "Create Page [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/page-create", + "custom_fields": { + "database_id": null, + "notion_secret": null, + "properties": null + }, + "output_types": ["Text"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "CustomComponent-PU0K5", + "description": "A component for creating Notion pages.", + "display_name": "Create Page [Notion] " + }, + "selected": false, + "width": 384, + "height": 477, + "positionAbsolute": { + "x": -3077.2269116193215, + "y": -960.9450220159636 + }, + "dragging": false + }, + { + "id": "CustomComponent-YODla", + "type": "genericNode", + "position": { "x": -3485.297183150799, "y": -362.8525892356713 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nfrom typing import Dict\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionDatabaseProperties(CustomComponent):\r\n display_name = 
\"List Database Properties [Notion]\"\r\n description = \"Retrieve properties of a Notion database.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-database-properties\"\r\n icon = \"NotionDirectoryLoader\"\r\n \r\n def build_config(self):\r\n return {\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion database.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n database_id: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n url = f\"https://api.notion.com/v1/databases/{database_id}\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n response = requests.get(url, headers=headers)\r\n response.raise_for_status()\r\n\r\n data = response.json()\r\n properties = data.get(\"properties\", {})\r\n\r\n record = Record(text=str(response.json()), data=properties)\r\n self.status = f\"Retrieved {len(properties)} properties from the Notion database.\\n {record.text}\"\r\n return record", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "database_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "database_id", + "display_name": "Database ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion database.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "NOTION_NMSTX_DB_ID" + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, 
+ "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Retrieve properties of a Notion database.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "List Database Properties [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/list-database-properties", + "custom_fields": { "database_id": null, "notion_secret": null }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "CustomComponent-YODla", + "description": "Retrieve properties of a Notion database.", + "display_name": "List Database Properties [Notion] " + }, + "selected": true, + "width": 384, + "height": 383, + "dragging": false, + "positionAbsolute": { "x": -3485.297183150799, "y": -362.8525892356713 } + }, + { + "id": "CustomComponent-wHlSz", + "type": "genericNode", + "position": { "x": -2668.7714642455403, "y": -657.2376228212606 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import json\r\nimport requests\r\nfrom typing import Dict, Any\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionPageUpdate(CustomComponent):\r\n display_name = \"Update Page Property [Notion]\"\r\n description = \"Update the properties of a Notion page.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-update\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"page_id\": {\r\n \"display_name\": 
\"Page ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion page to update.\",\r\n },\r\n \"properties\": {\r\n \"display_name\": \"Properties\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The properties to update on the page (as a JSON string).\",\r\n \"multiline\": True,\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n page_id: str,\r\n properties: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n url = f\"https://api.notion.com/v1/pages/{page_id}\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n try:\r\n parsed_properties = json.loads(properties)\r\n except json.JSONDecodeError as e:\r\n raise ValueError(\"Invalid JSON format for properties\") from e\r\n\r\n data = {\r\n \"properties\": parsed_properties\r\n }\r\n\r\n response = requests.patch(url, headers=headers, json=data)\r\n response.raise_for_status()\r\n\r\n updated_page = response.json()\r\n\r\n output = \"Updated page properties:\\n\"\r\n for prop_name, prop_value in updated_page[\"properties\"].items():\r\n output += f\"{prop_name}: {prop_value}\\n\"\r\n\r\n self.status = output\r\n return Record(data=updated_page)", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + 
"title_case": false, + "input_types": ["Text"], + "value": "" + }, + "page_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "page_id", + "display_name": "Page ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion page to update.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "properties": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "properties", + "display_name": "Properties", + "advanced": false, + "dynamic": false, + "info": "The properties to update on the page (as a JSON string).", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "{ \"title\": [ { \"text\": { \"content\": \"Test Page\" } } ] }" + }, + "_type": "CustomComponent" + }, + "description": "Update the properties of a Notion page.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "Update Page Property [Notion]", + "documentation": "https://docs.langflow.org/integrations/notion/page-update", + "custom_fields": { + "page_id": null, + "properties": null, + "notion_secret": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "CustomComponent-wHlSz", + "description": "Update the properties of a Notion page.", + "display_name": "Update Page Property [Notion]" + }, + "selected": false, + "width": 384, + "height": 477, + "dragging": false, + "positionAbsolute": { + "x": -2668.7714642455403, + "y": -657.2376228212606 + } + }, + { + "id": "CustomComponent-oelYw", + "type": "genericNode", + "position": { "x": -2253.1007124701327, "y": -448.47240118604134 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + 
"code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nfrom typing import Dict, Any\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\n\r\nclass NotionPageContent(CustomComponent):\r\n display_name = \"Page Content Viewer [Notion]\"\r\n description = \"Retrieve the content of a Notion page as plain text.\"\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-content-viewer\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"page_id\": {\r\n \"display_name\": \"Page ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion page to retrieve.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n page_id: str,\r\n notion_secret: str,\r\n ) -> Record:\r\n blocks_url = f\"https://api.notion.com/v1/blocks/{page_id}/children?page_size=100\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\r\n }\r\n\r\n # Retrieve the child blocks\r\n blocks_response = requests.get(blocks_url, headers=headers)\r\n blocks_response.raise_for_status()\r\n blocks_data = blocks_response.json()\r\n\r\n # Parse the blocks and extract the content as plain text\r\n content = self.parse_blocks(blocks_data[\"results\"])\r\n\r\n self.status = content\r\n return Record(data={\"content\": content}, text=content)\r\n\r\n def parse_blocks(self, blocks: list) -> str:\r\n content = \"\"\r\n for block in blocks:\r\n block_type = block[\"type\"]\r\n if block_type in [\"paragraph\", \"heading_1\", \"heading_2\", \"heading_3\", \"quote\"]:\r\n content += self.parse_rich_text(block[block_type][\"rich_text\"]) + \"\\n\\n\"\r\n elif block_type in 
[\"bulleted_list_item\", \"numbered_list_item\"]:\r\n content += self.parse_rich_text(block[block_type][\"rich_text\"]) + \"\\n\"\r\n elif block_type == \"to_do\":\r\n content += self.parse_rich_text(block[\"to_do\"][\"rich_text\"]) + \"\\n\"\r\n elif block_type == \"code\":\r\n content += self.parse_rich_text(block[\"code\"][\"rich_text\"]) + \"\\n\\n\"\r\n elif block_type == \"image\":\r\n content += f\"[Image: {block['image']['external']['url']}]\\n\\n\"\r\n elif block_type == \"divider\":\r\n content += \"---\\n\\n\"\r\n return content.strip()\r\n\r\n def parse_rich_text(self, rich_text: list) -> str:\r\n text = \"\"\r\n for segment in rich_text:\r\n text += segment[\"plain_text\"]\r\n return text", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "page_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "page_id", + "display_name": "Page ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion page to retrieve.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Retrieve the content of a Notion page as plain text.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "Page Content Viewer [Notion] ", + "documentation": 
"https://docs.langflow.org/integrations/notion/page-content-viewer", + "custom_fields": { "page_id": null, "notion_secret": null }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "CustomComponent-oelYw", + "description": "Retrieve the content of a Notion page as plain text.", + "display_name": "Page Content Viewer [Notion] " + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": -2253.1007124701327, + "y": -448.47240118604134 + }, + "dragging": false + }, + { + "id": "CustomComponent-Pn52w", + "type": "genericNode", + "position": { "x": -3070.9222948695096, "y": -472.4537855763852 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nimport json\r\nfrom typing import Dict, Any, List\r\nfrom langflow.custom import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass NotionListPages(CustomComponent):\r\n display_name = \"List Pages [Notion]\"\r\n description = (\r\n \"Query a Notion database with filtering and sorting. \"\r\n \"The input should be a JSON string containing the 'filter' and 'sorts' objects. 
\"\r\n \"Example input:\\n\"\r\n '{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}'\r\n )\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-pages\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n field_order = [\r\n \"notion_secret\",\r\n \"database_id\",\r\n \"query_payload\",\r\n ]\r\n\r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n \"database_id\": {\r\n \"display_name\": \"Database ID\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The ID of the Notion database to query.\",\r\n },\r\n \"query_payload\": {\r\n \"display_name\": \"Database query\",\r\n \"field_type\": \"str\",\r\n \"info\": \"A JSON string containing the filters that will be used for querying the database. EG: {'filter': {'property': 'Status', 'status': {'equals': 'In progress'}}}\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n database_id: str,\r\n query_payload: str = \"{}\",\r\n ) -> List[Record]:\r\n try:\r\n query_data = json.loads(query_payload)\r\n filter_obj = query_data.get(\"filter\")\r\n sorts = query_data.get(\"sorts\", [])\r\n\r\n url = f\"https://api.notion.com/v1/databases/{database_id}/query\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"sorts\": sorts,\r\n }\r\n\r\n if filter_obj:\r\n data[\"filter\"] = filter_obj\r\n\r\n response = requests.post(url, headers=headers, json=data)\r\n response.raise_for_status()\r\n\r\n results = response.json()\r\n records = []\r\n combined_text = f\"Pages found: {len(results['results'])}\\n\\n\"\r\n for page in results['results']:\r\n page_data = {\r\n 'id': page['id'],\r\n 'url': 
page['url'],\r\n 'created_time': page['created_time'],\r\n 'last_edited_time': page['last_edited_time'],\r\n 'properties': page['properties'],\r\n }\r\n\r\n text = (\r\n f\"id: {page['id']}\\n\"\r\n f\"url: {page['url']}\\n\"\r\n f\"created_time: {page['created_time']}\\n\"\r\n f\"last_edited_time: {page['last_edited_time']}\\n\"\r\n f\"properties: {json.dumps(page['properties'], indent=2)}\\n\\n\"\r\n )\r\n\r\n combined_text += text\r\n records.append(Record(text=text, data=page_data))\r\n \r\n self.status = combined_text.strip()\r\n return records\r\n\r\n except Exception as e:\r\n self.status = f\"An error occurred: {str(e)}\"\r\n return [Record(text=self.status, data=[])]", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "database_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "database_id", + "display_name": "Database ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the Notion database to query.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "NOTION_NMSTX_DB_ID" + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "query_payload": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "query_payload", + 
"display_name": "Database query", + "advanced": false, + "dynamic": false, + "info": "A JSON string containing the filters that will be used for querying the database. EG: {'filter': {'property': 'Status', 'status': {'equals': 'In progress'}}}", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Query a Notion database with filtering and sorting. The input should be a JSON string containing the 'filter' and 'sorts' objects. Example input:\n{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "List Pages [Notion] ", + "documentation": "https://docs.langflow.org/integrations/notion/list-pages", + "custom_fields": { + "notion_secret": null, + "database_id": null, + "query_payload": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": ["notion_secret", "database_id", "query_payload"], + "beta": false + }, + "id": "CustomComponent-Pn52w", + "description": "Query a Notion database with filtering and sorting. The input should be a JSON string containing the 'filter' and 'sorts' objects. 
Example input:\n{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}", + "display_name": "List Pages [Notion] " + }, + "selected": false, + "width": 384, + "height": 517, + "positionAbsolute": { + "x": -3070.9222948695096, + "y": -472.4537855763852 + }, + "dragging": false + }, + { + "id": "CustomComponent-I8Dec", + "type": "genericNode", + "position": { "x": -2256.686402636563, "y": -963.4541117792749 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "block_id": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "block_id", + "display_name": "Page/Block ID", + "advanced": false, + "dynamic": false, + "info": "The ID of the page/block to add the content.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import json\r\nfrom typing import List, Dict, Any\r\nfrom markdown import markdown\r\nfrom bs4 import BeautifulSoup\r\nimport requests\r\n\r\nfrom langflow import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass AddContentToPage(CustomComponent):\r\n display_name = \"Add Content to Page [Notion]\"\r\n description = \"Convert markdown text to Notion blocks and append them to a Notion page.\"\r\n documentation: str = \"https://developers.notion.com/reference/patch-block-children\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n def build_config(self):\r\n return {\r\n \"markdown_text\": {\r\n \"display_name\": \"Markdown Text\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The markdown text to convert to Notion blocks.\",\r\n \"multiline\": True,\r\n },\r\n \"block_id\": {\r\n \"display_name\": \"Page/Block ID\",\r\n \"field_type\": 
\"str\",\r\n \"info\": \"The ID of the page/block to add the content.\",\r\n },\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n }\r\n\r\n def build(self, markdown_text: str, block_id: str, notion_secret: str) -> Record:\r\n html_text = markdown(markdown_text)\r\n soup = BeautifulSoup(html_text, 'html.parser')\r\n blocks = self.process_node(soup)\r\n\r\n url = f\"https://api.notion.com/v1/blocks/{block_id}/children\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"children\": blocks,\r\n }\r\n\r\n response = requests.patch(url, headers=headers, json=data)\r\n self.status = str(response.json())\r\n response.raise_for_status()\r\n\r\n result = response.json()\r\n self.status = f\"Appended {len(blocks)} blocks to page with ID: {block_id}\"\r\n return Record(data=result, text=json.dumps(result))\r\n\r\n def process_node(self, node):\r\n blocks = []\r\n if isinstance(node, str):\r\n text = node.strip()\r\n if text:\r\n if text.startswith('#'):\r\n heading_level = text.count('#', 0, 6)\r\n heading_text = text[heading_level:].strip()\r\n if heading_level == 1:\r\n blocks.append(self.create_block('heading_1', heading_text))\r\n elif heading_level == 2:\r\n blocks.append(self.create_block('heading_2', heading_text))\r\n elif heading_level == 3:\r\n blocks.append(self.create_block('heading_3', heading_text))\r\n else:\r\n blocks.append(self.create_block('paragraph', text))\r\n elif node.name == 'h1':\r\n blocks.append(self.create_block('heading_1', node.get_text(strip=True)))\r\n elif node.name == 'h2':\r\n blocks.append(self.create_block('heading_2', node.get_text(strip=True)))\r\n elif node.name == 'h3':\r\n blocks.append(self.create_block('heading_3', node.get_text(strip=True)))\r\n elif node.name == 'p':\r\n 
code_node = node.find('code')\r\n if code_node:\r\n code_text = code_node.get_text()\r\n language, code = self.extract_language_and_code(code_text)\r\n blocks.append(self.create_block('code', code, language=language))\r\n elif self.is_table(str(node)):\r\n blocks.extend(self.process_table(node))\r\n else:\r\n blocks.append(self.create_block('paragraph', node.get_text(strip=True)))\r\n elif node.name == 'ul':\r\n blocks.extend(self.process_list(node, 'bulleted_list_item'))\r\n elif node.name == 'ol':\r\n blocks.extend(self.process_list(node, 'numbered_list_item'))\r\n elif node.name == 'blockquote':\r\n blocks.append(self.create_block('quote', node.get_text(strip=True)))\r\n elif node.name == 'hr':\r\n blocks.append(self.create_block('divider', ''))\r\n elif node.name == 'img':\r\n blocks.append(self.create_block('image', '', image_url=node.get('src')))\r\n elif node.name == 'a':\r\n blocks.append(self.create_block('bookmark', node.get_text(strip=True), link_url=node.get('href')))\r\n elif node.name == 'table':\r\n blocks.extend(self.process_table(node))\r\n\r\n for child in node.children:\r\n if isinstance(child, str):\r\n continue\r\n blocks.extend(self.process_node(child))\r\n\r\n return blocks\r\n\r\n def extract_language_and_code(self, code_text):\r\n lines = code_text.split('\\n')\r\n language = lines[0].strip()\r\n code = '\\n'.join(lines[1:]).strip()\r\n return language, code\r\n\r\n def is_code_block(self, text):\r\n return text.startswith('```')\r\n\r\n def extract_code_block(self, text):\r\n lines = text.split('\\n')\r\n language = lines[0].strip('`').strip()\r\n code = '\\n'.join(lines[1:]).strip('`').strip()\r\n return language, code\r\n \r\n def is_table(self, text):\r\n rows = text.split('\\n')\r\n if len(rows) < 2:\r\n return False\r\n\r\n has_separator = False\r\n for i, row in enumerate(rows):\r\n if '|' in row:\r\n cells = [cell.strip() for cell in row.split('|')]\r\n cells = [cell for cell in cells if cell] # Remove empty cells\r\n if i == 1 and 
all(set(cell) <= set('-|') for cell in cells):\r\n has_separator = True\r\n elif not cells:\r\n return False\r\n\r\n return has_separator and len(rows) >= 3\r\n\r\n def process_list(self, node, list_type):\r\n blocks = []\r\n for item in node.find_all('li'):\r\n item_text = item.get_text(strip=True)\r\n checked = item_text.startswith('[x]')\r\n is_checklist = item_text.startswith('[ ]') or checked\r\n\r\n if is_checklist:\r\n item_text = item_text.replace('[x]', '').replace('[ ]', '').strip()\r\n blocks.append(self.create_block('to_do', item_text, checked=checked))\r\n else:\r\n blocks.append(self.create_block(list_type, item_text))\r\n return blocks\r\n\r\n def process_table(self, node):\r\n blocks = []\r\n header_row = node.find('thead').find('tr') if node.find('thead') else None\r\n body_rows = node.find('tbody').find_all('tr') if node.find('tbody') else []\r\n\r\n if header_row or body_rows:\r\n table_width = max(len(header_row.find_all(['th', 'td'])) if header_row else 0,\r\n max(len(row.find_all(['th', 'td'])) for row in body_rows))\r\n\r\n table_block = self.create_block('table', '', table_width=table_width, has_column_header=bool(header_row))\r\n blocks.append(table_block)\r\n\r\n if header_row:\r\n header_cells = [cell.get_text(strip=True) for cell in header_row.find_all(['th', 'td'])]\r\n header_row_block = self.create_block('table_row', header_cells)\r\n blocks.append(header_row_block)\r\n\r\n for row in body_rows:\r\n cells = [cell.get_text(strip=True) for cell in row.find_all(['th', 'td'])]\r\n row_block = self.create_block('table_row', cells)\r\n blocks.append(row_block)\r\n\r\n return blocks\r\n \r\n def create_block(self, block_type: str, content: str, **kwargs) -> Dict[str, Any]:\r\n block = {\r\n \"object\": \"block\",\r\n \"type\": block_type,\r\n block_type: {},\r\n }\r\n\r\n if block_type in [\"paragraph\", \"heading_1\", \"heading_2\", \"heading_3\", \"bulleted_list_item\", \"numbered_list_item\", \"quote\"]:\r\n 
block[block_type][\"rich_text\"] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n elif block_type == 'to_do':\r\n block[block_type][\"rich_text\"] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n block[block_type]['checked'] = kwargs.get('checked', False)\r\n elif block_type == 'code':\r\n block[block_type]['rich_text'] = [\r\n {\r\n \"type\": \"text\",\r\n \"text\": {\r\n \"content\": content,\r\n },\r\n }\r\n ]\r\n block[block_type]['language'] = kwargs.get('language', 'plain text')\r\n elif block_type == 'image':\r\n block[block_type] = {\r\n \"type\": \"external\",\r\n \"external\": {\r\n \"url\": kwargs.get('image_url', '')\r\n }\r\n }\r\n elif block_type == 'divider':\r\n pass\r\n elif block_type == 'bookmark':\r\n block[block_type]['url'] = kwargs.get('link_url', '')\r\n elif block_type == 'table':\r\n block[block_type]['table_width'] = kwargs.get('table_width', 0)\r\n block[block_type]['has_column_header'] = kwargs.get('has_column_header', False)\r\n block[block_type]['has_row_header'] = kwargs.get('has_row_header', False)\r\n elif block_type == 'table_row':\r\n block[block_type]['cells'] = [[{'type': 'text', 'text': {'content': cell}} for cell in content]]\r\n\r\n return block", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "markdown_text": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "markdown_text", + "display_name": "Markdown Text", + "advanced": false, + "dynamic": false, + "info": "The markdown text to convert to Notion blocks.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "# Heading 1\n\n## Heading 2\n\n### Heading 3\n\nThis is a regular 
paragraph.\n\nHere's another paragraph with an image:\n![Image](https://example.com/image.jpg)\n\n## Checklist\n- [x] Completed task\n- [ ] Incomplete task\n- [x] Another completed task\n\n## Numbered List\n1. First item\n2. Second item\n3. Third item\n\n## Bulleted List\n- Item 1\n- Item 2\n- Item 3\n\n## Code Block\n```python\ndef hello_world():\n print(\"Hello, World!\")\n```\n\n## Quote\n> This is a blockquote.\n> It can span multiple lines.\n\n## Horizontal Rule\n---\n\n\n## Link\n[Notion API Documentation](https://developers.notion.com)\n\n" + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Convert markdown text to Notion blocks and append them to a Notion page.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "Add Content to Page [Notion] ", + "documentation": "https://developers.notion.com/reference/patch-block-children", + "custom_fields": { + "markdown_text": null, + "block_id": null, + "notion_secret": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "official": false + }, + "id": "CustomComponent-I8Dec" + }, + "selected": false, + "width": 384, + "height": 497, + "positionAbsolute": { + "x": -2256.686402636563, + "y": -963.4541117792749 + }, + "dragging": false + }, + { + "id": "CustomComponent-ZcsA9", + "type": "genericNode", + "position": { "x": -3488.029350341937, "y": -965.3756250644985 }, + "data": { + "type": "CustomComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + 
"placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import requests\r\nfrom typing import Dict, Any, List\r\nfrom langflow.custom import CustomComponent\r\nfrom langflow.schema import Record\r\n\r\nclass NotionSearch(CustomComponent):\r\n display_name = \"Search Notion\"\r\n description = (\r\n \"Searches all pages and databases that have been shared with an integration.\"\r\n )\r\n documentation: str = \"https://docs.langflow.org/integrations/notion/search\"\r\n icon = \"NotionDirectoryLoader\"\r\n\r\n field_order = [\r\n \"notion_secret\",\r\n \"query\",\r\n \"filter_value\",\r\n \"sort_direction\",\r\n ]\r\n\r\n def build_config(self):\r\n return {\r\n \"notion_secret\": {\r\n \"display_name\": \"Notion Secret\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The Notion integration token.\",\r\n \"password\": True,\r\n },\r\n \"query\": {\r\n \"display_name\": \"Search Query\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The text that the API compares page and database titles against.\",\r\n },\r\n \"filter_value\": {\r\n \"display_name\": \"Filter Type\",\r\n \"field_type\": \"str\",\r\n \"info\": \"Limits the results to either only pages or only databases.\",\r\n \"options\": [\"page\", \"database\"],\r\n \"default_value\": \"page\",\r\n },\r\n \"sort_direction\": {\r\n \"display_name\": \"Sort Direction\",\r\n \"field_type\": \"str\",\r\n \"info\": \"The direction to sort the results.\",\r\n \"options\": [\"ascending\", \"descending\"],\r\n \"default_value\": \"descending\",\r\n },\r\n }\r\n\r\n def build(\r\n self,\r\n notion_secret: str,\r\n query: str = \"\",\r\n filter_value: str = \"page\",\r\n sort_direction: str = \"descending\",\r\n ) -> List[Record]:\r\n try:\r\n url = \"https://api.notion.com/v1/search\"\r\n headers = {\r\n \"Authorization\": f\"Bearer {notion_secret}\",\r\n \"Content-Type\": \"application/json\",\r\n \"Notion-Version\": \"2022-06-28\",\r\n }\r\n\r\n data = {\r\n \"query\": query,\r\n \"filter\": 
{\r\n \"value\": filter_value,\r\n \"property\": \"object\"\r\n },\r\n \"sort\":{\r\n \"direction\": sort_direction,\r\n \"timestamp\": \"last_edited_time\"\r\n }\r\n }\r\n\r\n response = requests.post(url, headers=headers, json=data)\r\n response.raise_for_status()\r\n\r\n results = response.json()\r\n records = []\r\n combined_text = f\"Results found: {len(results['results'])}\\n\\n\"\r\n for result in results['results']:\r\n result_data = {\r\n 'id': result['id'],\r\n 'type': result['object'],\r\n 'last_edited_time': result['last_edited_time'],\r\n }\r\n \r\n if result['object'] == 'page':\r\n result_data['title_or_url'] = result['url']\r\n text = f\"id: {result['id']}\\ntitle_or_url: {result['url']}\\n\"\r\n elif result['object'] == 'database':\r\n if 'title' in result and isinstance(result['title'], list) and len(result['title']) > 0:\r\n result_data['title_or_url'] = result['title'][0]['plain_text']\r\n text = f\"id: {result['id']}\\ntitle_or_url: {result['title'][0]['plain_text']}\\n\"\r\n else:\r\n result_data['title_or_url'] = \"N/A\"\r\n text = f\"id: {result['id']}\\ntitle_or_url: N/A\\n\"\r\n\r\n text += f\"type: {result['object']}\\nlast_edited_time: {result['last_edited_time']}\\n\\n\"\r\n combined_text += text\r\n records.append(Record(text=text, data=result_data))\r\n \r\n self.status = combined_text\r\n return records\r\n\r\n except Exception as e:\r\n self.status = f\"An error occurred: {str(e)}\"\r\n return [Record(text=self.status, data=[])]", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "filter_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "database", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["page", "database"], + "name": "filter_value", + "display_name": "Filter Type", + "advanced": false, 
+ "dynamic": false, + "info": "Limits the results to either only pages or only databases.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "notion_secret": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "notion_secret", + "display_name": "Notion Secret", + "advanced": false, + "dynamic": false, + "info": "The Notion integration token.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "query": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "query", + "display_name": "Search Query", + "advanced": false, + "dynamic": false, + "info": "The text that the API compares page and database titles against.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sort_direction": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "descending", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["ascending", "descending"], + "name": "sort_direction", + "display_name": "Sort Direction", + "advanced": false, + "dynamic": false, + "info": "The direction to sort the results.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Searches all pages and databases that have been shared with an integration.", + "icon": "NotionDirectoryLoader", + "base_classes": ["Record"], + "display_name": "Search [Notion]", + "documentation": "https://docs.langflow.org/integrations/notion/search", + "custom_fields": { + "notion_secret": null, + "query": null, + "filter_value": null, + "sort_direction": null + }, + "output_types": 
["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "notion_secret", + "query", + "filter_value", + "sort_direction" + ], + "beta": false + }, + "id": "CustomComponent-ZcsA9", + "description": "Searches all pages and databases that have been shared with an integration.", + "display_name": "Search [Notion]" + }, + "selected": false, + "width": 384, + "height": 591, + "positionAbsolute": { + "x": -3488.029350341937, + "y": -965.3756250644985 + }, + "dragging": false + } + ], + "edges": [], + "viewport": { + "x": 2623.378922967084, + "y": 696.8541079344027, + "zoom": 0.5981384177708997 + } + }, + "description": "A Bundle containing Notion components for Page and Database manipulation. You can list pages, users databases, update properties, create new pages and add content to Notion Pages.", + "name": "Notion - Components", + "last_tested_version": "1.0.0a36", + "is_component": false +} diff --git a/docs/static/logos/botmessage.svg b/docs/static/logos/botmessage.svg new file mode 100644 index 000000000..ab468da41 --- /dev/null +++ b/docs/static/logos/botmessage.svg @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/docs/static/logos/greencheck.svg b/docs/static/logos/greencheck.svg new file mode 100644 index 000000000..842be95f7 --- /dev/null +++ b/docs/static/logos/greencheck.svg @@ -0,0 +1,11 @@ + + + diff --git a/docs/static/logos/playbutton.svg b/docs/static/logos/playbutton.svg new file mode 100644 index 000000000..978407473 --- /dev/null +++ b/docs/static/logos/playbutton.svg @@ -0,0 +1,11 @@ + + + diff --git a/docs/static/videos/chat_memory.mp4 b/docs/static/videos/chat_memory.mp4 new file mode 100644 index 000000000..ffed26a74 Binary files /dev/null and b/docs/static/videos/chat_memory.mp4 differ diff --git a/docs/static/videos/combine_text.mp4 b/docs/static/videos/combine_text.mp4 new file mode 100644 index 000000000..7e48303c2 Binary files /dev/null and b/docs/static/videos/combine_text.mp4 differ diff --git 
a/docs/static/videos/create_record.mp4 b/docs/static/videos/create_record.mp4 new file mode 100644 index 000000000..558f702e3 Binary files /dev/null and b/docs/static/videos/create_record.mp4 differ diff --git a/docs/static/videos/langflow_global_variables.mp4 b/docs/static/videos/langflow_global_variables.mp4 new file mode 100644 index 000000000..8be58e779 Binary files /dev/null and b/docs/static/videos/langflow_global_variables.mp4 differ diff --git a/docs/static/videos/langflow_playground.mp4 b/docs/static/videos/langflow_playground.mp4 new file mode 100644 index 000000000..aa7488c5f Binary files /dev/null and b/docs/static/videos/langflow_playground.mp4 differ diff --git a/docs/static/videos/pass.mp4 b/docs/static/videos/pass.mp4 new file mode 100644 index 000000000..bb062364e Binary files /dev/null and b/docs/static/videos/pass.mp4 differ diff --git a/docs/static/videos/store_message.mp4 b/docs/static/videos/store_message.mp4 new file mode 100644 index 000000000..c8352da0e Binary files /dev/null and b/docs/static/videos/store_message.mp4 differ diff --git a/docs/static/videos/sub_flow.mp4 b/docs/static/videos/sub_flow.mp4 new file mode 100644 index 000000000..24222e815 Binary files /dev/null and b/docs/static/videos/sub_flow.mp4 differ diff --git a/docs/static/videos/text_operator.mp4 b/docs/static/videos/text_operator.mp4 new file mode 100644 index 000000000..3124e6bc4 Binary files /dev/null and b/docs/static/videos/text_operator.mp4 differ diff --git a/package-lock.json b/package-lock.json deleted file mode 100644 index 8c3f329a0..000000000 --- a/package-lock.json +++ /dev/null @@ -1,932 +0,0 @@ -{ - "name": "langflow", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "dependencies": { - "@radix-ui/react-popover": "^1.0.7", - "cmdk": "^0.2.0" - } - }, - "node_modules/@babel/runtime": { - "version": "7.23.2", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.2.tgz", - "integrity": 
"sha512-mM8eg4yl5D6i3lu2QKPuPH4FArvJ8KhTofbE7jwMUv9KX5mBvwPAqnV3MlyBNqdp9RyRKP6Yck8TrfYrPvX3bg==", - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@floating-ui/core": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.5.0.tgz", - "integrity": "sha512-kK1h4m36DQ0UHGj5Ah4db7R0rHemTqqO0QLvUqi1/mUUp3LuAWbWxdxSIf/XsnH9VS6rRVPLJCncjRzUvyCLXg==", - "dependencies": { - "@floating-ui/utils": "^0.1.3" - } - }, - "node_modules/@floating-ui/dom": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.5.3.tgz", - "integrity": "sha512-ClAbQnEqJAKCJOEbbLo5IUlZHkNszqhuxS4fHAVxRPXPya6Ysf2G8KypnYcOTpx6I8xcgF9bbHb6g/2KpbV8qA==", - "dependencies": { - "@floating-ui/core": "^1.4.2", - "@floating-ui/utils": "^0.1.3" - } - }, - "node_modules/@floating-ui/react-dom": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.4.tgz", - "integrity": "sha512-CF8k2rgKeh/49UrnIBs4BdxPUV6vize/Db1d/YbCLyp9GiVZ0BEwf5AiDSxJRCr6yOkGqTFHtmrULxkEfYZ7dQ==", - "dependencies": { - "@floating-ui/dom": "^1.5.1" - }, - "peerDependencies": { - "react": ">=16.8.0", - "react-dom": ">=16.8.0" - } - }, - "node_modules/@floating-ui/utils": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.1.6.tgz", - "integrity": "sha512-OfX7E2oUDYxtBvsuS4e/jSn4Q9Qb6DzgeYtsAdkPZ47znpoNsMgZw0+tVijiv3uGNR6dgNlty6r9rzIzHjtd/A==" - }, - "node_modules/@radix-ui/primitive": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.1.tgz", - "integrity": "sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - } - }, - "node_modules/@radix-ui/react-arrow": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.0.3.tgz", - "integrity": "sha512-wSP+pHsB/jQRaL6voubsQ/ZlrGBHHrOjmBnr19hxYgtS0WvAFwZhK2WP/YY5yF9uKECCEEDGxuLxq1NBK51wFA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-compose-refs": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz", - "integrity": "sha512-fDSBgd44FKHa1FRMU59qBMPFcl2PZE+2nmqunj+BWFyYYjnhIDWL2ItDs3rrbJDQOtzt5nIebLCQc4QRfz6LJw==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-context": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.1.tgz", - "integrity": "sha512-ebbrdFoYTcuZ0v4wG5tedGnp9tzcV8awzsxYph7gXUyvnNLuTIcCk1q17JEbnVhXAKG9oX3KtchwiMIAYp9NLg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-dialog": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.0.tgz", - "integrity": "sha512-Yn9YU+QlHYLWwV1XfKiqnGVpWYWk6MeBVM6x/bcoyPvxgjQGoeT35482viLPctTMWoMw0PoHgqfSox7Ig+957Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.0", - "@radix-ui/react-compose-refs": "1.0.0", - 
"@radix-ui/react-context": "1.0.0", - "@radix-ui/react-dismissable-layer": "1.0.0", - "@radix-ui/react-focus-guards": "1.0.0", - "@radix-ui/react-focus-scope": "1.0.0", - "@radix-ui/react-id": "1.0.0", - "@radix-ui/react-portal": "1.0.0", - "@radix-ui/react-presence": "1.0.0", - "@radix-ui/react-primitive": "1.0.0", - "@radix-ui/react-slot": "1.0.0", - "@radix-ui/react-use-controllable-state": "1.0.0", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "2.5.4" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/primitive": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.0.tgz", - "integrity": "sha512-3e7rn8FDMin4CgeL7Z/49smCA3rFYY3Ha2rUQ7HRWFadS5iCRw08ZgVT1LaNTCNqgvrUiyczLflrVrF0SRQtNA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-compose-refs": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.0.tgz", - "integrity": "sha512-0KaSv6sx787/hK3eF53iOkiSLwAGlFMx5lotrqD2pTjB18KbybKoEIgkNZTKC60YECDQTKGTRcDBILwZVqVKvA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-context": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.0.tgz", - "integrity": "sha512-1pVM9RfOQ+n/N5PJK33kRSKsr1glNxomxONs5c49MliinBY6Yw2Q995qfBUUo0/Mbg05B/sGA0gkgPI7kmSHBg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-dismissable-layer": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.0.tgz", - "integrity": "sha512-n7kDRfx+LB1zLueRDvZ1Pd0bxdJWDUZNQ/GWoxDn2prnuJKRdxsjulejX/ePkOsLi2tTm6P24mDqlMSgQpsT6g==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.0", - "@radix-ui/react-compose-refs": "1.0.0", - "@radix-ui/react-primitive": "1.0.0", - "@radix-ui/react-use-callback-ref": "1.0.0", - "@radix-ui/react-use-escape-keydown": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-focus-guards": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.0.tgz", - "integrity": "sha512-UagjDk4ijOAnGu4WMUPj9ahi7/zJJqNZ9ZAiGPp7waUWJO0O1aWXi/udPphI0IUjvrhBsZJGSN66dR2dsueLWQ==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-focus-scope": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.0.tgz", - "integrity": "sha512-C4SWtsULLGf/2L4oGeIHlvWQx7Rf+7cX/vKOAD2dXW0A1b5QXwi3wWeaEgW+wn+SEVrraMUk05vLU9fZZz5HbQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.0", - "@radix-ui/react-primitive": "1.0.0", - "@radix-ui/react-use-callback-ref": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-id": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.0.0.tgz", - "integrity": "sha512-Q6iAB/U7Tq3NTolBBQbHTgclPmGWE3OlktGGqrClPozSw4vkQ1DfQAOtzgRPecKsMdJINE05iaoDUG8tRzCBjw==", - "dependencies": { - 
"@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-portal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.0.tgz", - "integrity": "sha512-a8qyFO/Xb99d8wQdu4o7qnigNjTPG123uADNecz0eX4usnQEj7o+cG4ZX4zkqq98NYekT7UoEQIjxBNWIFuqTA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-presence": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.0.0.tgz", - "integrity": "sha512-A+6XEvN01NfVWiKu38ybawfHsBjWum42MRPnEuqPsBZ4eV7e/7K321B5VgYMPv3Xx5An6o1/l9ZuDBgmcmWK3w==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.0", - "@radix-ui/react-use-layout-effect": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-primitive": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-1.0.0.tgz", - "integrity": "sha512-EyXe6mnRlHZ8b6f4ilTDrXmkLShICIuOTTj0GX4w1rp+wSxf3+TD05u1UOITC8VsJ2a9nwHvdXtOXEOl0Cw/zQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-slot": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.0.tgz", - "integrity": 
"sha512-3mrKauI/tWXo1Ll+gN5dHcxDPdm/Df1ufcDLCecn+pnCIVcdWE7CujXo8QaXOWRJyZyQWWbpB8eFwHzWXlv5mQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-callback-ref": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.0.tgz", - "integrity": "sha512-GZtyzoHz95Rhs6S63D2t/eqvdFCm7I+yHMLVQheKM7nBD8mbZIt+ct1jz4536MDnaOGKIxynJ8eHTkVGVVkoTg==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-controllable-state": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.0.tgz", - "integrity": "sha512-FohDoZvk3mEXh9AWAVyRTYR4Sq7/gavuofglmiXB2g1aKyboUD4YtgWxKj8O5n+Uak52gXQ4wKz5IFST4vtJHg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-escape-keydown": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.0.tgz", - "integrity": "sha512-JwfBCUIfhXRxKExgIqGa4CQsiMemo1Xt0W/B4ei3fpzpvPENKpMKQ8mZSB6Acj3ebrAEgi2xiQvcI1PAAodvyg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "1.0.0" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-layout-effect": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.0.tgz", - "integrity": "sha512-6Tpkq+R6LOlmQb1R5NNETLG0B4YP0wc+klfXafpUCj6JGyaUc8il7/kUZ7m59rGbXGczE9Bs+iz2qloqsZBduQ==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "react": "^16.8 || ^17.0 || ^18.0" - } - }, - "node_modules/@radix-ui/react-dialog/node_modules/react-remove-scroll": { - "version": "2.5.4", - "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.4.tgz", - "integrity": "sha512-xGVKJJr0SJGQVirVFAUZ2k1QLyO6m+2fy0l8Qawbp5Jgrv3DeLalrfMNBFSlmz5kriGGzsVBtGVnf4pTKIhhWA==", - "dependencies": { - "react-remove-scroll-bar": "^2.3.3", - "react-style-singleton": "^2.2.1", - "tslib": "^2.1.0", - "use-callback-ref": "^1.3.0", - "use-sidecar": "^1.1.2" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-dismissable-layer": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz", - "integrity": "sha512-aJeDjQhywg9LBu2t/At58hCvr7pEm0o2Ke1x33B+MhjNmmZ17sy4KImo0KPLgsnc/zN7GPdce8Cnn0SWvwZO7g==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-escape-keydown": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-focus-guards": { - "version": "1.0.1", - 
"resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.1.tgz", - "integrity": "sha512-Rect2dWbQ8waGzhMavsIbmSVCgYxkXLxxR3ZvCX79JOglzdEy4JXMb98lq4hPxUbLr77nP0UOGf4rcMU+s1pUA==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-focus-scope": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.4.tgz", - "integrity": "sha512-sL04Mgvf+FmyvZeYfNu1EPAaaxD+aw7cYeIB9L9Fvq8+urhltTRaEo5ysKOpHuKPclsZcSUMKlN05x4u+CINpA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-id": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.0.1.tgz", - "integrity": "sha512-tI7sT/kqYp8p96yGWY1OAnLHrqDgzHefRBKQ2YAkBS5ja7QLcZ9Z/uY7bEjPUatf8RomoXM8/1sMj1IJaE5UzQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-popover": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.7.tgz", - "integrity": 
"sha512-shtvVnlsxT6faMnK/a7n0wptwBD23xc1Z5mdrtKLwVEfsEMXodS0r5s0/g5P0hX//EKYZS2sxUjqfzlg52ZSnQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.5", - "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.4", - "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-popper": "1.1.3", - "@radix-ui/react-portal": "1.0.4", - "@radix-ui/react-presence": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-slot": "1.0.2", - "@radix-ui/react-use-controllable-state": "1.0.1", - "aria-hidden": "^1.1.1", - "react-remove-scroll": "2.5.5" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-popper": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.3.tgz", - "integrity": "sha512-cKpopj/5RHZWjrbF2846jBNacjQVwkP068DfmgrNJXpvVWrOvlAmE9xSiy5OqeE+Gi8D9fP+oDhUnPqNMY8/5w==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@floating-ui/react-dom": "^2.0.0", - "@radix-ui/react-arrow": "1.0.3", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-layout-effect": "1.0.1", - "@radix-ui/react-use-rect": "1.0.1", - "@radix-ui/react-use-size": "1.0.1", - "@radix-ui/rect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": 
{ - "optional": true - } - } - }, - "node_modules/@radix-ui/react-portal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.4.tgz", - "integrity": "sha512-Qki+C/EuGUVCQTOTD5vzJzJuMUlewbzuKyUy+/iHM2uwGiru9gZeBJtHAPKAEkB5KWGi9mP/CHKcY0wt1aW45Q==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-presence": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.0.1.tgz", - "integrity": "sha512-UXLW4UAbIY5ZjcvzjfRFo5gxva8QirC9hF7wRE4U5gz+TP0DbRk+//qyuAQ1McDxBt1xNMBTaciFGvEmJvAZCg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-use-layout-effect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-primitive": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-1.0.3.tgz", - "integrity": "sha512-yi58uVyoAcK/Nq1inRY56ZSjKypBNKTa/1mcL8qdl6oJeEaDbOldlzrGn7P6Q3Id5d+SYNGc5AJgc4vGhjs5+g==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-slot": "1.0.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - 
"@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-slot": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.2.tgz", - "integrity": "sha512-YeTpuq4deV+6DusvVUW4ivBgnkHwECUu0BiN43L5UCDFgdhsRUWAghhTF5MbvNTPzmiFOx90asDSUjWuCNapwg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-callback-ref": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.1.tgz", - "integrity": "sha512-D94LjX4Sp0xJFVaoQOd3OO9k7tpBYNOXdVhkltUbGv2Qb9OXdrg/CpsjlZv7ia14Sylv398LswWBVVu5nqKzAQ==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-controllable-state": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.1.tgz", - "integrity": "sha512-Svl5GY5FQeN758fWKrjM6Qb7asvXeiZltlT4U2gVfl8Gx5UAv2sMR0LWo8yhsIZh2oQ0eFdZ59aoOOMV7b47VA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-escape-keydown": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.3.tgz", - "integrity": 
"sha512-vyL82j40hcFicA+M4Ex7hVkB9vHgSse1ZWomAqV2Je3RleKGO5iM8KMOEtfoSB0PnIelMd2lATjTGMYqN5ylTg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-callback-ref": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-layout-effect": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.1.tgz", - "integrity": "sha512-v/5RegiJWYdoCvMnITBkNNx6bCj20fiaJnWtRkU18yITptraXjffz5Qbn05uOiQnOvi+dbkznkoaMltz1GnszQ==", - "dependencies": { - "@babel/runtime": "^7.13.10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-rect": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.0.1.tgz", - "integrity": "sha512-Cq5DLuSiuYVKNU8orzJMbl15TXilTnJKUCltMVQg53BQOF1/C5toAaGrowkgksdBQ9H+SRL23g0HDmg9tvmxXw==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/rect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-size": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.0.1.tgz", - "integrity": "sha512-ibay+VqrgcaI6veAojjofPATwledXiSmX+C0KrBk/xgpX9rBzPV3OsfwlhQdUOFbh+LKQorLYT+xTXW9V8yd0g==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-use-layout-effect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - 
"node_modules/@radix-ui/rect": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.0.1.tgz", - "integrity": "sha512-fyrgCaedtvMg9NK3en0pnOYJdtfwxUcNolezkNPUsoX57X8oQk+NkqcvzHXD2uKNij6GXmWU9NDru2IWjrO4BQ==", - "dependencies": { - "@babel/runtime": "^7.13.10" - } - }, - "node_modules/aria-hidden": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.3.tgz", - "integrity": "sha512-xcLxITLe2HYa1cnYnwCjkOO1PqUHQpozB8x9AR0OgWN2woOBi5kSDVxKfd0b7sb1hw5qFeJhXm9H1nu3xSfLeQ==", - "dependencies": { - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/cmdk": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/cmdk/-/cmdk-0.2.0.tgz", - "integrity": "sha512-JQpKvEOb86SnvMZbYaFKYhvzFntWBeSZdyii0rZPhKJj9uwJBxu4DaVYDrRN7r3mPop56oPhRw+JYWTKs66TYw==", - "dependencies": { - "@radix-ui/react-dialog": "1.0.0", - "command-score": "0.1.2" - }, - "peerDependencies": { - "react": "^18.0.0", - "react-dom": "^18.0.0" - } - }, - "node_modules/command-score": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/command-score/-/command-score-0.1.2.tgz", - "integrity": "sha512-VtDvQpIJBvBatnONUsPzXYFVKQQAhuf3XTNOAsdBxCNO/QCtUUd8LSgjn0GVarBkCad6aJCZfXgrjYbl/KRr7w==" - }, - "node_modules/detect-node-es": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", - "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" - }, - "node_modules/get-nonce": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", - "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", - "engines": { - "node": ">=6" - } - }, - "node_modules/invariant": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", - "integrity": 
"sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "dependencies": { - "loose-envify": "^1.0.0" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/react": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", - "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", - "peer": true, - "dependencies": { - "loose-envify": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-dom": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", - "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", - "peer": true, - "dependencies": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" - }, - "peerDependencies": { - "react": "^18.2.0" - } - }, - "node_modules/react-remove-scroll": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", - "integrity": "sha512-ImKhrzJJsyXJfBZ4bzu8Bwpka14c/fQt0k+cyFp/PBhTfyDnU5hjOtM4AG/0AMyy8oKzOTR0lDgJIM7pYXI0kw==", - "dependencies": { - "react-remove-scroll-bar": "^2.3.3", - "react-style-singleton": "^2.2.1", - "tslib": "^2.1.0", - "use-callback-ref": "^1.3.0", - "use-sidecar": "^1.1.2" - }, - "engines": { - "node": ">=10" - }, - 
"peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/react-remove-scroll-bar": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.4.tgz", - "integrity": "sha512-63C4YQBUt0m6ALadE9XV56hV8BgJWDmmTPY758iIJjfQKt2nYwoUrPk0LXRXcB/yIj82T1/Ixfdpdk68LwIB0A==", - "dependencies": { - "react-style-singleton": "^2.2.1", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/react-style-singleton": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz", - "integrity": "sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==", - "dependencies": { - "get-nonce": "^1.0.0", - "invariant": "^2.2.4", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/regenerator-runtime": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz", - "integrity": "sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==" - }, - "node_modules/scheduler": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", - "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", - "peer": true, - "dependencies": { - "loose-envify": 
"^1.1.0" - } - }, - "node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/use-callback-ref": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.0.tgz", - "integrity": "sha512-3FT9PRuRdbB9HfXhEq35u4oZkvpJ5kuYbpqhCfmiZyReuRgpnhDlbr2ZEnnuS0RrJAPn6l23xjFg9kpDM+Ms7w==", - "dependencies": { - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/use-sidecar": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz", - "integrity": "sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==", - "dependencies": { - "detect-node-es": "^1.1.0", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "^16.9.0 || ^17.0.0 || ^18.0.0", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - } - } -} diff --git a/package.json b/package.json deleted file mode 100644 index 33d31f0d1..000000000 --- a/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "dependencies": { - "@radix-ui/react-popover": "^1.0.7", - "cmdk": "^0.2.0" - } -} diff --git a/poetry.lock b/poetry.lock index 546f5f45b..b7b0e66a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -167,20 +167,19 @@ files = [ [[package]] name = "anthropic" -version = "0.26.1" +version = "0.28.0" description = "The official Python library for the anthropic API" optional = false python-versions = ">=3.7" files = [ - {file = "anthropic-0.26.1-py3-none-any.whl", hash = 
"sha256:2812b9b250b551ed8a1f0a7e6ae3f005654098994f45ebca5b5808bd154c9628"}, - {file = "anthropic-0.26.1.tar.gz", hash = "sha256:26680ff781a6f678a30a1dccd0743631e602b23a47719439ffdef5335fa167d8"}, + {file = "anthropic-0.28.0-py3-none-any.whl", hash = "sha256:2b620b21aee3d20c5d8005483c34df239d53ae895687113b26b8a36892a7e20f"}, ] [package.dependencies] anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" -jiter = ">=0.1.0,<1" +jiter = ">=0.4.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tokenizers = ">=0.13.0" @@ -192,13 +191,13 @@ vertex = ["google-auth (>=2,<3)"] [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -472,17 +471,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.112" +version = "1.34.116" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.112-py3-none-any.whl", hash = "sha256:4cf28ce2c19a4e4963f1cb1f9b659a548f840f88af3e2da727b35ceb104f9223"}, - {file = "boto3-1.34.112.tar.gz", hash = "sha256:1092ac6c68acdd33051ed0d2b7cb6f5a4527c5d1535a48cda53f7012accde206"}, + {file = "boto3-1.34.116-py3-none-any.whl", hash = "sha256:e7f5ab2d1f1b90971a2b9369760c2c6bae49dae98c084a5c3f5c78e3968ace15"}, + {file = "boto3-1.34.116.tar.gz", hash = "sha256:53cb8aeb405afa1cd2b25421e27a951aeb568026675dec020587861fac96ac87"}, ] 
[package.dependencies] -botocore = ">=1.34.112,<1.35.0" +botocore = ">=1.34.116,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -491,13 +490,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.112" +version = "1.34.116" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.112-py3-none-any.whl", hash = "sha256:637f568a6c3322fb7e5ee55e0c5367324a15a331e87a497783ac6209253dde30"}, - {file = "botocore-1.34.112.tar.gz", hash = "sha256:053495953910bcf95d336ab1adb13efb70edc5462932eff180560737ad069319"}, + {file = "botocore-1.34.116-py3-none-any.whl", hash = "sha256:ec4d42c816e9b2d87a2439ad277e7dda16a4a614ef6839cf66f4c1a58afa547c"}, + {file = "botocore-1.34.116.tar.gz", hash = "sha256:269cae7ba99081519a9f87d7298e238d9e68ba94eb4f8ddfa906224c34cb8b6c"}, ] [package.dependencies] @@ -1310,63 +1309,63 @@ files = [ [[package]] name = "coverage" -version = "7.5.1" +version = "7.5.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"}, - {file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"}, - {file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"}, - {file = "coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"}, - {file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"}, - {file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"}, - {file = 
"coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"}, - {file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = "sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"}, - {file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"}, - {file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"}, - {file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"}, - {file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"}, - {file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"}, - 
{file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"}, - {file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"}, - {file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"}, - {file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"}, - {file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"}, - {file = "coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"}, - {file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"}, - {file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"}, - {file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"}, - {file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = 
"coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = 
"coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, ] [package.dependencies] @@ -1807,13 +1806,13 @@ gmpy2 = ["gmpy2"] [[package]] name = "elastic-transport" -version = "8.13.0" +version = "8.13.1" description = "Transport classes and utilities shared among Python Elastic client libraries" 
optional = false python-versions = ">=3.7" files = [ - {file = "elastic-transport-8.13.0.tar.gz", hash = "sha256:2410ec1ff51221e8b3a01c0afa9f0d0498e1386a269283801f5c12f98e42dc45"}, - {file = "elastic_transport-8.13.0-py3-none-any.whl", hash = "sha256:aec890afdddd057762b27ff3553b0be8fa4673ec1a4fd922dfbd00325874bb3d"}, + {file = "elastic_transport-8.13.1-py3-none-any.whl", hash = "sha256:5d4bb6b8e9d74a9c16de274e91a5caf65a3a8d12876f1e99152975e15b2746fe"}, + {file = "elastic_transport-8.13.1.tar.gz", hash = "sha256:16339d392b4bbe86ad00b4bdeecff10edf516d32bc6c16053846625f2c6ea250"}, ] [package.dependencies] @@ -1825,13 +1824,13 @@ develop = ["aiohttp", "furo", "httpx", "mock", "opentelemetry-api", "opentelemet [[package]] name = "elasticsearch" -version = "8.13.1" +version = "8.13.2" description = "Python client for Elasticsearch" optional = false python-versions = ">=3.7" files = [ - {file = "elasticsearch-8.13.1-py3-none-any.whl", hash = "sha256:ba2b3f8b30a7a81beae690f1cede52fbcfd29baf3ef5bb028d4fa86972feebd8"}, - {file = "elasticsearch-8.13.1.tar.gz", hash = "sha256:1594d2d1293672db62525bc4688d86cdaf118db0f901808db28dc90ad19b81e1"}, + {file = "elasticsearch-8.13.2-py3-none-any.whl", hash = "sha256:7412ceae9c0e437a72854ab3123aa1f37110d1635cc645366988b8c0fee98598"}, + {file = "elasticsearch-8.13.2.tar.gz", hash = "sha256:d51c93431a459b2b7c6c919b6e92a2adc8ac712758de9aeeb16cd4997fc148ad"}, ] [package.dependencies] @@ -1840,7 +1839,7 @@ elastic-transport = ">=8.13,<9" [package.extras] async = ["aiohttp (>=3,<4)"] orjson = ["orjson (>=3)"] -requests = ["requests (>=2.4.0,<3.0.0)"] +requests = ["requests (>=2.4.0,!=2.32.2,<3.0.0)"] vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"] [[package]] @@ -2539,13 +2538,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.130.0" +version = "2.131.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = 
"google-api-python-client-2.130.0.tar.gz", hash = "sha256:2bba3122b82a649c677b8a694b8e2bbf2a5fbf3420265caf3343bb88e2e9f0ae"}, - {file = "google_api_python_client-2.130.0-py2.py3-none-any.whl", hash = "sha256:7d45a28d738628715944a9c9d73e8696e7e03ac50b7de87f5e3035cefa94ed3a"}, + {file = "google-api-python-client-2.131.0.tar.gz", hash = "sha256:1c03e24af62238a8817ecc24e9d4c32ddd4cb1f323b08413652d9a9a592fc00d"}, + {file = "google_api_python_client-2.131.0-py2.py3-none-any.whl", hash = "sha256:e325409bdcef4604d505d9246ce7199960a010a0569ac503b9f319db8dbdc217"}, ] [package.dependencies] @@ -2595,13 +2594,13 @@ httplib2 = ">=0.19.0" [[package]] name = "google-cloud-aiplatform" -version = "1.52.0" +version = "1.53.0" description = "Vertex AI API client library" optional = false python-versions = ">=3.8" files = [ - {file = "google-cloud-aiplatform-1.52.0.tar.gz", hash = "sha256:932a56e3050b4bc9a2c0630e6af3c0bd52f0bcf72b5dc01c059874231099edd3"}, - {file = "google_cloud_aiplatform-1.52.0-py2.py3-none-any.whl", hash = "sha256:8c62f5d0ec39e008737ebba4875105ed7563dd0958f591f95dc7816e4b30f92a"}, + {file = "google-cloud-aiplatform-1.53.0.tar.gz", hash = "sha256:574cfad8ac5fa5d57ef717f5335ce05636a5fa9b8aeea0f5c325b46b9448e6b1"}, + {file = "google_cloud_aiplatform-1.53.0-py2.py3-none-any.whl", hash = "sha256:9dfb1f110e6d4795b45afcfab79108fc5c8ed9aa4eaf899e433bc2ca1b76c778"}, ] [package.dependencies] @@ -3545,22 +3544,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.0" +version = "7.1.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, - {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = 
"sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" @@ -3637,13 +3636,13 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio [[package]] name = "ipython" -version = "8.24.0" +version = "8.25.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" files = [ - {file = "ipython-8.24.0-py3-none-any.whl", hash = "sha256:d7bf2f6c4314984e3e02393213bab8703cf163ede39672ce5918c51fe253a2a3"}, - {file = "ipython-8.24.0.tar.gz", hash = "sha256:010db3f8a728a578bb641fdd06c063b9fb8e96a9464c63aec6310fbcb5e80501"}, + {file = "ipython-8.25.0-py3-none-any.whl", hash = "sha256:53eee7ad44df903a06655871cbab66d156a051fd86f3ec6750470ac9604ac1ab"}, + {file = "ipython-8.25.0.tar.gz", hash = "sha256:c6ed726a140b6e725b911528f80439c534fac915246af3efc39440a6b0f9d716"}, ] [package.dependencies] @@ -3662,7 +3661,7 @@ typing-extensions = {version = ">=4.6", markers = "python_version 
< \"3.12\""} [package.extras] all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] kernel = ["ipykernel"] matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] @@ -3722,72 +3721,72 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.4.0" +version = "0.4.1" description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.4.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4aa6226d82a4a4505078c0bd5947bad65399635fc5cd4b226512e41753624edf"}, - {file = "jiter-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:947111ac906740a948e7b63799481acd3d5ef666ccb178d146e25718640b7408"}, - {file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69572ffb4e84ae289a7422b9af4ea123cae2ce0772228859b37d4b26b4bc92ea"}, - {file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba6046cbb5d1baa5a781b846f7e5438596a332f249a857d63f86ef5d1d9563b0"}, - {file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4f346e54602782e66d07df0d1c7389384fd93680052ed6170da2c6dc758409e"}, - {file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49110ce693f07e97d61089d894cea05a0b9894d5ccc6ac6fc583028726c8c8af"}, - {file = "jiter-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2e358df6fd129f3a4e087539f086355ad0107e5da16dbc8bc857d94222eaeed5"}, - {file = "jiter-0.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb852ca39a48f3c049def56f0d1771b32e948e4f429a782d14ef4cc64cfd26e"}, - {file = "jiter-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:44dc045decb2545bffe2da04ea4c36d9438d3f3d49fc47ed423ea75c352b712e"}, - {file = "jiter-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:413adb15372ac63db04373240f40925788e4282c997eeafc2040530049a0a599"}, - {file = "jiter-0.4.0-cp310-none-win32.whl", hash = "sha256:0b48ea71673a97b897e4b94bbc871e62495a5a85f836c9f90712a4c70aa3ef7e"}, - {file = "jiter-0.4.0-cp310-none-win_amd64.whl", hash = "sha256:6a1c84b44afafaf0ba6223679cf17af664b889da14da31d8af3595fd977d96fa"}, - {file = "jiter-0.4.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b2cc498345fa37ca23fbc20271a553aa46e6eb00924600f49b7dc4b2aa8952ee"}, - {file = "jiter-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:69f7221ac09ab421abf04f89942026868297c568133998fb181bcf435760cbf3"}, - {file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d01c52f3e5a56ae73af36bd13797dd1a56711eb522748e5e84d15425b3f10"}, - {file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:39be97d5ce0c4d0dae28c23c03a0af0501a725589427e99763f99c42e18aa402"}, - {file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eac2ed1ec1e577b92b7ea2d4e6de8aec0c1164defd8af8affdc8ec0f0ec2904a"}, - {file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6258837d184c92c9cb91c983c310ad7269d41afb49d34f00ca9246e073943a03"}, - {file = "jiter-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123c2a77b066bf17a4d021e238e8351058cfa56b90ac04f2522d120dc64ea055"}, - {file = "jiter-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2df939f792c7a40e55f36700417db551b9f6b84d348990fa0f2c608adeb1f11b"}, - {file = "jiter-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cb1b09b16d40cf9ba1d11ba11e5b96ad29286a6a1c4ad5e6a2aef5e352a89f5d"}, - {file = "jiter-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0efb4208889ebdbf933bf08dbcbc16e64ffd34c8e2b28044ee142789a9dc3a67"}, - {file = "jiter-0.4.0-cp311-none-win32.whl", hash = "sha256:20545ac1b68e7e5b066a1e8347840c9cebdd02ace65faae2e655fc02ec5c915c"}, - {file = "jiter-0.4.0-cp311-none-win_amd64.whl", hash = "sha256:6b300f9887c8e4431cd03a974ea3e4f9958885636003c3864220a9b2d2f8462b"}, - {file = "jiter-0.4.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:923432a0563bbae404ff25bb010e348514a69bfab979f2f8119b23b625dbf6d9"}, - {file = "jiter-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab8bb0ec8b97cec4422dc8b37b525442d969244488c805b834609ab0ccd788e2"}, - {file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b857adb127b9c533907226791eafa79c5038c3eb5a477984994bf7c4715ba518"}, - {file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2609cc0d1d8d470e921ff9a604afeb4c701bbe13e00bd9834d5aa6e7ea732a9b"}, - {file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d39e99f8b7df46a119b6f84321f6ba01f16fa46abfa765d44c05c486d8e66829"}, - {file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:56de8b518ebfe76a70f856741f6de248ce396c50a87acef827b6e8388e3a502d"}, - {file = "jiter-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488b7e777be47f67ce1a1f8f8eb907f9bbd81af5c03784a9bab09d025c250233"}, - {file = "jiter-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ea35e0ecbb5dadd457855eb980dcc548c14cf5341bcd22a43814cb56f2bcc79"}, - {file = "jiter-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:e1a9e9ee69c80b63951c93226b68d0e955953f64fe758bad2afe7ef7f9016af9"}, - {file = "jiter-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:78e2f3cc2a32a21d43ccc5afcf66f5d17e827ccc4e6d21c0b353bdad2c7dcc9c"}, - {file = "jiter-0.4.0-cp312-none-win32.whl", hash = "sha256:eeaa7a2b47a99f4ebbb4142bb58b95617e09f24c87570f6a57d2770687c9ddbe"}, - {file = "jiter-0.4.0-cp312-none-win_amd64.whl", hash = "sha256:8d4a78b385b93ff59a67215d26000fcb4789a388fca3730d1b60fab17fc81e3c"}, - {file = "jiter-0.4.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ebf20a3fac1089ce26963bf04140da0f803d55332ec69d59c5a87cf1a87d29c4"}, - {file = "jiter-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d62244ffc6a168187452277adeefb7b2c30170689c6bf543a51e98e8c17ddab7"}, - {file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40b2cde77446a41cec595739fd168be87edff2428eaf7c3438231224dd0ab7a5"}, - {file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e51fc0a22021ec8905b9b00a2f7d25756f2ff7a653e35a790a2067ae126b51f6"}, - {file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a56e6f980b89d7cfe5c43811dcf52d6f37b319428a4540511235dafda9ea7808"}, - {file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fec16adab8d3d3d6d74e3711a1f380836ebeab2a20e3f88cfe2ec5094d8b84"}, - {file = "jiter-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e3de515801c954e8f1dc1f575282a4a86df9e782d4993ea1ed2be9a8dedaa0"}, - {file = "jiter-0.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17e0ad8abf0bb04d81810eaeaab35d2c99b5da11fcd1058e0a389607ff6503b0"}, - {file = "jiter-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8dc0132b728f3b3e90ff0d1874504cd49c78f3553bf3745168a7fc0b4cf674e1"}, - {file = "jiter-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:81a883104aa96e494d3d28eaf7070780d03ecee8ccfdfaf7e4899710340c47f1"}, - {file = "jiter-0.4.0-cp38-none-win32.whl", hash = "sha256:a044c53ab1aaa4af624ac9574181b5bad8e260aea7e03104738156511433deba"}, - {file = "jiter-0.4.0-cp38-none-win_amd64.whl", hash = "sha256:d920035c869053e3d9a0b3ff94384d16a8ef5fde3dea55f97bd29916f6e27554"}, - {file = "jiter-0.4.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:091e978f4e586a2f1c69bf940d45f4e6a23455877172a0ab7d6de04a3b119299"}, - {file = "jiter-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79134b2d601309bcbe3304a262d7d228ad61d53c80883231c637773000a6d683"}, - {file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c471473e0b05058b5d729ff04271b6d45a575ac8bd9948563268c734b380ac7e"}, - {file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb84b8930da8b32b0b1fdff9817e2c4b47e8981b5647ad11c4975403416e4112"}, - {file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f2805e28941751ebfe0948596a64cde4cfb9b84bea5282affd020063e659c96"}, - {file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42ef59f9e513bf081a8b5c5578933ea9c3a63e559e6e3501a3e72edcd456ff5e"}, - {file = "jiter-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae12e3906f9e565120ab569de261b738e3a1ec50c40e30c67499e4f893e9a8c"}, - {file = "jiter-0.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:264dc1324f45a793bc89af4f653225229eb17bca9ec7107dce6c8fb4fe68d20f"}, - {file = "jiter-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9a1c172ec47d846e25881dfbd52438ddb690da4ea04d185e477abd3db6c32f8a"}, - {file = "jiter-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ccde31d0bc114aedad0dbd71b7f63ba0f0eecd7ec9ae1926a0ca01c1eb2854e7"}, - {file = "jiter-0.4.0-cp39-none-win32.whl", hash = 
"sha256:13139b05792fbc13a0f9a5b4c89823ea0874141decae1b8f693f12bb1d28e061"}, - {file = "jiter-0.4.0-cp39-none-win_amd64.whl", hash = "sha256:3a729b2631c6d5551a41069697415fee9659c3eadc9ab87369376ba51930cd00"}, - {file = "jiter-0.4.0.tar.gz", hash = "sha256:68203e02e0419bc3eca717c580c2d8f615aeee1150e2a1fb68d6600a7e52a37c"}, + {file = "jiter-0.4.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3c2370cd8826b484f3fc6ed729cb58510ba24b4bc277c92323a57d35cf4df223"}, + {file = "jiter-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3587af23140a2eb282bba980010dae60f3b8b1579a034c5e869e9b94220a5972"}, + {file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df8788d34545d47de864032a78bae49a14b66b67196c73cd95f1c1e3081d9c73"}, + {file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91bf2d31e906a3ca26fc8ee0cb979e0e51b12aa7e83999c6afea047538f95e5c"}, + {file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8586e68702666b6acd919c65f718a09603adcfd8b4c7026bade2441d9e7bd34e"}, + {file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:639b766bde088546b5205fd31608502b5b42abee3294b43cc95c6ea8f9a257c3"}, + {file = "jiter-0.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cb32457296351c98da289d21a092a6c53c75beb80e7127c8e16224ee342c7c7"}, + {file = "jiter-0.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:583263bd81bce5426806cf27ba85e4b97746797fae13c71e50a8689e06e57f81"}, + {file = "jiter-0.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c0e413999a819ccef9b5fd22ef4b9b8c48a98e49da4d09b43ebce286d0d80e26"}, + {file = "jiter-0.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5e50468d5acfef335ba8bc3892bb304354c38ba18acb3f7ae428451e47136e49"}, + {file = "jiter-0.4.1-cp310-none-win32.whl", hash = 
"sha256:b2ac90b94dd717644c61c8ed0c2ec6e9505bd7314b91a1549680d7f1cb8f1da4"}, + {file = "jiter-0.4.1-cp310-none-win_amd64.whl", hash = "sha256:2509868b8dacf4f65d04b4d951d390f30f403a87a997a14e2db2d232c7a468a7"}, + {file = "jiter-0.4.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b56e4f2fa5767976f2332e9e067010ddfe1379b6456b5458123ba50657c33e02"}, + {file = "jiter-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f813b49db21c946aa010accc54b8e5c9d0007be252bda4738159fa6c65d6d396"}, + {file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2933c04ebd77b3e9cf34f80ba45c093739c687c9c5a4fd0a8c701a3bfd90940"}, + {file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b02ddd65513705ec38211ea48ffc0fde41aa46166d9f7706972daf97b57c8599"}, + {file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:88d06af883524e5429d75395bb4ee6ddeda4c30818b2f3e3b8f4afa2dd8f28c0"}, + {file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd333eca1090cf21e6359721eecbb2a7fe031cc4db3dd595081430b4a59371c5"}, + {file = "jiter-0.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdc90017cd22dca6b40f2f8518b38363e78aee3cb32f84e1cb08900a598ca91b"}, + {file = "jiter-0.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aedce5b11ca58853d46461e1880079836bfab4e132be2b7d2093ec193081bbc8"}, + {file = "jiter-0.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e55b2f4d2d5066979b0e0e58d85e3fffd0f6e6a0523aab7e0ce75950259387da"}, + {file = "jiter-0.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3c85c586f1cd71c2a1e78756f6857119947b532379bd9be4338bf3dacf1e87f"}, + {file = "jiter-0.4.1-cp311-none-win32.whl", hash = "sha256:37875f56222f2bb61410e15196d9b91510ccca322c391f3d20c91d667130d15e"}, + {file = "jiter-0.4.1-cp311-none-win_amd64.whl", hash = 
"sha256:b71758befea8dbdc10e0fb40a776e085eed0e74afef42468ebb58562289e9190"}, + {file = "jiter-0.4.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:93a8869c18a3721e41d7adb289c5c71aea8887eb368a3411219a0afb62955cbe"}, + {file = "jiter-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ffbc61349f2f27676d40d68e8ef83fc2a9dd2c1464962b1d1b1d8504bccbf85"}, + {file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f1f33e9fd4de4369a8d00fdf2571a8246a942095fb2a9d4cd25135ee675c85"}, + {file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8f91a19eba23b4a1bb1e5b64c19cfdbf46604180e5dee40548b53ca13afd2d9"}, + {file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a60f8e495448d8e02d291fa9a8522cfe775a10210ba428994f383965e6f6e65"}, + {file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7387998c6585ce0f02ae4f5338fabf72b99494860c347f27bc34720290eafb15"}, + {file = "jiter-0.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7cbf41da6506b42db21a1a0befa48e16384591e84e80db002a826ccf07668f1"}, + {file = "jiter-0.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:793ae2499722b9fc31e300abd07418902512109bca17f617598a31a9e17bddce"}, + {file = "jiter-0.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:19f7953b8ada7ee109764ad91d4afb1a9f69b77cde0b890844744c513612dbf8"}, + {file = "jiter-0.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dcd3d6a142d7b267a8c5f1e28d02759e2e29343b095f6d8aaf463333a842e1f8"}, + {file = "jiter-0.4.1-cp312-none-win32.whl", hash = "sha256:fffdf137c3ab7f0c5facb7c478b57ad3e1eb9b149daff48687844de77b78ab70"}, + {file = "jiter-0.4.1-cp312-none-win_amd64.whl", hash = "sha256:fde004e47a801512c4167f188a6372960374fbd59e635753b3ee536e81953eb3"}, + {file = "jiter-0.4.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:b429ba25e05ca28d5e7efa4249032746ac28ec6ad68017ed3ea009989c597911"}, + {file = "jiter-0.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:27df9925d0282c80bdd41613ace7cd799bd6355acdfe25cc48ec16843541999e"}, + {file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb68736a0e2b00eda83937c1937f999e8d7dab68820c04343ac2e2eb2c5c2193"}, + {file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c218458ac32ce0b495f013293867649b40c067a4d7533fa0d70a46f7194febae"}, + {file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebead86e80e352753f6e6f78ca96c12d764a8dbbc7c4b25938ce657ab0e4e879"}, + {file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf58f878d43294bea400a9df86ef7796dd2e67969109bce22d337ca77372c69"}, + {file = "jiter-0.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba671e60570cd99b8ed83ce0d82703040dc34c793229ac607f09683ba1981163"}, + {file = "jiter-0.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef0bd8b68ad14f045544989b6ad3758bee6dc01f6924bce5b4fd7060b0a09b1b"}, + {file = "jiter-0.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7888f165a0fe285e015ee18cfcb8b5432c4fa389235b4c24c339ca0cc51ba979"}, + {file = "jiter-0.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d9c443b2a71a8c3ab6578f5faf7725ad5f63dbb92d87f820eec56de9da0560f"}, + {file = "jiter-0.4.1-cp38-none-win32.whl", hash = "sha256:6f618d1b04493bc9196e466ef59e0a6388eb85e936d1a61833449677643bbdd9"}, + {file = "jiter-0.4.1-cp38-none-win_amd64.whl", hash = "sha256:46b6364a0b2a81cc259768bda131e8528aa3af4312f23f7e10aa04d24f54bbb1"}, + {file = "jiter-0.4.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6680785a9273a87e463c86a962042d620c00c7bb8100dde1a4c78b2184cdd613"}, + {file = "jiter-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:36b10d945b9ccd2e9f2720e37395daf9e63cfa47e5e0e2887c4931888f0800cd"}, + {file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78820599693bda34be17119abf9fad1f02e501b4816e47addbee9c5c768fb361"}, + {file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68697317170d8f851dfe978ba278b886e54e837ecd2a80c4a33ae780a0f19526"}, + {file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d126ffc3876cfc1fba6ae2be37f2532b5db593a96cf4b845724b50b44339c4fd"}, + {file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b082223f2e7e6f506d837df935f58f25cabf0a2b35902b4ec73fb561fbf2694a"}, + {file = "jiter-0.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13314287782782be8439dfafca50f13fcab18046227068a3a8e8d8ac888f092b"}, + {file = "jiter-0.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3da1346375605926f1ca4604d154ff41f5e3b933c6e01005e534bca2197d919f"}, + {file = "jiter-0.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9b67a97fbce3ec35ee97439c8b786393f71ecbe7458d5e9279d4c172772eac36"}, + {file = "jiter-0.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7b0f34544923bff0f3393aa3d60087686d86089c9361f6530bb5d19ebfb3db47"}, + {file = "jiter-0.4.1-cp39-none-win32.whl", hash = "sha256:b0c93ef95b896a4ce5edff23071e4dcad77c9e9262fcb6ca2b050f781e8335a9"}, + {file = "jiter-0.4.1-cp39-none-win_amd64.whl", hash = "sha256:3db5c83c8655ce031943b6f08434dac1a91e1477b0df452de0c44f3390a9b22c"}, + {file = "jiter-0.4.1.tar.gz", hash = "sha256:741851cf5f37cf3583f2a56829d734c9fd17334770c9a326e6d25291603d4278"}, ] [[package]] @@ -4075,20 +4074,37 @@ langchain-core = ">=0.1.43,<0.3" [[package]] name = "langchain-astradb" -version = "0.3.2" +version = "0.3.3" description = "An integration package connecting Astra DB and LangChain" optional = false python-versions = "<4.0,>=3.8.1" files 
= [ - {file = "langchain_astradb-0.3.2-py3-none-any.whl", hash = "sha256:15afc5c0105e863e8f57bf8686490c00be47ed05e47d3263ad1577f2031c0dd5"}, - {file = "langchain_astradb-0.3.2.tar.gz", hash = "sha256:4316f2c59402779a347a811e1b5470a0570348cb89baac17472d860b63188122"}, + {file = "langchain_astradb-0.3.3-py3-none-any.whl", hash = "sha256:39deef1253947ef1bfaf3c27881ecdf07621d96c2cf37814aed9e506a9bee217"}, + {file = "langchain_astradb-0.3.3.tar.gz", hash = "sha256:f9a996ec4bef134896195430adeb7f264389c368a03d2ea91356837e8ddde091"}, ] [package.dependencies] -astrapy = ">=1,<2" +astrapy = ">=1.2,<2.0" langchain-core = ">=0.1.31,<0.3" numpy = ">=1,<2" +[[package]] +name = "langchain-chroma" +version = "0.1.1" +description = "An integration package connecting Chroma and LangChain" +optional = false +python-versions = "<3.13,>=3.8.1" +files = [ + {file = "langchain_chroma-0.1.1-py3-none-any.whl", hash = "sha256:7346ba749e5c5735e2a659bc5e3bb2901177bd08448d61682db5a7f882e27b87"}, + {file = "langchain_chroma-0.1.1.tar.gz", hash = "sha256:fb17c0cc591a425179958ca8cdb25d6cc9e43f4954a1ad4f3fe9cc2d306c455a"}, +] + +[package.dependencies] +chromadb = ">=0.4.0,<0.6.0" +fastapi = ">=0.95.2,<1" +langchain-core = ">=0.1.40,<0.3" +numpy = ">=1,<2" + [[package]] name = "langchain-cohere" version = "0.1.5" @@ -4133,18 +4149,18 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15. 
[[package]] name = "langchain-core" -version = "0.2.1" +version = "0.2.3" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_core-0.2.1-py3-none-any.whl", hash = "sha256:3521e1e573988c47399fca9739270c5d34f8ecec147253ad829eb9ff288f76d5"}, - {file = "langchain_core-0.2.1.tar.gz", hash = "sha256:49383126168d934559a543ce812c485048d9e6ac9b6798fbf3d4a72b6bba5b0c"}, + {file = "langchain_core-0.2.3-py3-none-any.whl", hash = "sha256:22189b5a3a30bfd65eb995f95e627f7c2c3acb322feb89f5f5f2fb7df21833a7"}, + {file = "langchain_core-0.2.3.tar.gz", hash = "sha256:fbc75a64b9c0b7655d96ca57a707df1e6c09efc1539c36adbd73260612549810"}, ] [package.dependencies] jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.0,<0.2.0" +langsmith = ">=0.1.65,<0.2.0" packaging = ">=23.2,<24.0" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -4242,18 +4258,18 @@ tokenizers = ">=0.15.1,<1" [[package]] name = "langchain-openai" -version = "0.1.7" +version = "0.1.8" description = "An integration package connecting OpenAI and LangChain" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_openai-0.1.7-py3-none-any.whl", hash = "sha256:39c3cb22bb739900ae8294d4d9939a6138c0ca7ad11198e57038eb14c08d04ec"}, - {file = "langchain_openai-0.1.7.tar.gz", hash = "sha256:fd7e1c33ba8e2cab4b2154f3a2fd4a0d9cc6518b41cf49bb87255f9f732a4896"}, + {file = "langchain_openai-0.1.8-py3-none-any.whl", hash = "sha256:8125c84223e9f43b05defbca64eedbcf362fd78a680de6c25e64f973b34a8063"}, + {file = "langchain_openai-0.1.8.tar.gz", hash = "sha256:a11fcce15def7917c44232abda6baaa63dfc79fe44be1531eea650d39a44cd95"}, ] [package.dependencies] -langchain-core = ">=0.1.46,<0.3" -openai = ">=1.24.0,<2.0.0" +langchain-core = ">=0.2.2,<0.3" +openai = ">=1.26.0,<2.0.0" tiktoken = ">=0.7,<1" [[package]] @@ -4291,13 +4307,13 @@ extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"] [[package]] name = "langchainhub" 
-version = "0.1.15" +version = "0.1.17" description = "The LangChain Hub API client" optional = false -python-versions = ">=3.8.1,<4.0" +python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchainhub-0.1.15-py3-none-any.whl", hash = "sha256:89a0951abd1db255e91c6d545d092a598fc255aa865d1ffc3ce8f93bbeae60e7"}, - {file = "langchainhub-0.1.15.tar.gz", hash = "sha256:fa3ff81a31946860f84c119f1e2f6b7c7707e2bd7ed2394a7313b286d59f3bda"}, + {file = "langchainhub-0.1.17-py3-none-any.whl", hash = "sha256:4c609b3948252c71670f0d98f73413b515cfd2f6701a7b40ce959203e6133e04"}, + {file = "langchainhub-0.1.17.tar.gz", hash = "sha256:af7df0cb1cebc7a6e0864e8632ae48ecad39ed96568f699c78657b9d04e50b46"}, ] [package.dependencies] @@ -4306,7 +4322,7 @@ types-requests = ">=2.31.0.2,<3.0.0.0" [[package]] name = "langflow-base" -version = "0.0.48" +version = "0.0.54" description = "A Python package with a built-in web application" optional = false python-versions = ">=3.10,<3.13" @@ -4340,6 +4356,7 @@ platformdirs = "^4.2.0" pydantic = "^2.7.0" pydantic-settings = "^2.2.0" pypdf = "^4.2.0" +pyperclip = "^1.8.2" python-docx = "^1.1.0" python-jose = "^3.3.0" python-multipart = "^0.0.7" @@ -4347,6 +4364,7 @@ python-socketio = "^5.11.0" rich = "^13.7.0" sqlmodel = "^0.0.18" typer = "^0.12.0" +uncurl = "^0.0.11" uvicorn = "^0.29.0" websockets = "*" @@ -4361,13 +4379,13 @@ url = "src/backend/base" [[package]] name = "langfuse" -version = "2.33.0" +version = "2.33.1" description = "A client library for accessing langfuse" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langfuse-2.33.0-py3-none-any.whl", hash = "sha256:362e3078c5a891df0b7ba3c9ce82f046d1f0274eab3d55337e443fff526f18ad"}, - {file = "langfuse-2.33.0.tar.gz", hash = "sha256:3ca2ef8539a8f28cb80135f4b46b80d5585ce183f8e2035f318be296d09d7d88"}, + {file = "langfuse-2.33.1-py3-none-any.whl", hash = "sha256:61ff3ff4b9c9c195028c981cba892106fdf90028e3950209a15f0ae06a378a36"}, + {file = "langfuse-2.33.1.tar.gz", hash = 
"sha256:444a870e8b13ad37df710931389ecd3bad9997e550edf3c3178b5a0bd7ada013"}, ] [package.dependencies] @@ -4385,13 +4403,13 @@ openai = ["openai (>=0.27.8)"] [[package]] name = "langsmith" -version = "0.1.63" +version = "0.1.67" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.63-py3-none-any.whl", hash = "sha256:7810afdf5e3f3b472fc581a29371fb96cd843dde2149e048d1b9610325159d1e"}, - {file = "langsmith-0.1.63.tar.gz", hash = "sha256:a609405b52f6f54df442a142cbf19ab38662d54e532f96028b4c546434d4afdf"}, + {file = "langsmith-0.1.67-py3-none-any.whl", hash = "sha256:7eb2e1c1b375925ff47700ed8071e10c15e942e9d1d634b4a449a9060364071a"}, + {file = "langsmith-0.1.67.tar.gz", hash = "sha256:149558669a2ac4f21471cd964e61072687bba23b7c1ccb51f190a8f59b595b39"}, ] [package.dependencies] @@ -4401,13 +4419,13 @@ requests = ">=2,<3" [[package]] name = "litellm" -version = "1.38.1" +version = "1.39.5" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.38.1-py3-none-any.whl", hash = "sha256:03e0bf79fbdf0285f5b2c185f8507056dea0481cb668a63fa1641058795af0c9"}, - {file = "litellm-1.38.1.tar.gz", hash = "sha256:8eed177d5883f11c3f7bdcc78d41379efbff921460c065534cf0f7ef011b0610"}, + {file = "litellm-1.39.5-py3-none-any.whl", hash = "sha256:1e8dd43c5d257fa8d7a0039b20aed7aeed4463d53608d1ba4ac233f1967a5330"}, + {file = "litellm-1.39.5.tar.gz", hash = "sha256:8f4ea9fe21d67890e81a578e12c30b4172260ff35971dc7c3edf7eb69167d3be"}, ] [package.dependencies] @@ -4427,12 +4445,12 @@ proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", " [[package]] name = "llama-cpp-python" -version = "0.2.75" +version = "0.2.76" description = "Python bindings for the llama.cpp library" optional = true 
python-versions = ">=3.8" files = [ - {file = "llama_cpp_python-0.2.75.tar.gz", hash = "sha256:aee9383935c42e812ee84265b1dafe5f0e3a20ee47216529b64a2ed6caaaed44"}, + {file = "llama_cpp_python-0.2.76.tar.gz", hash = "sha256:a4e2ab6b74dc87f565a21e4f1617c030f92d5b341375d7173876d238613a50ab"}, ] [package.dependencies] @@ -4447,292 +4465,6 @@ dev = ["black (>=23.3.0)", "httpx (>=0.24.1)", "mkdocs (>=1.4.3)", "mkdocs-mater server = ["PyYAML (>=5.1)", "fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", "sse-starlette (>=1.6.1)", "starlette-context (>=0.3.6,<0.4)", "uvicorn (>=0.22.0)"] test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"] -[[package]] -name = "llama-index" -version = "0.10.38" -description = "Interface between LLMs and your data" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index-0.10.38-py3-none-any.whl", hash = "sha256:5d521b0ea7111679521292432960d3b9fb53c98d55414bd42d753bc6271d234d"}, - {file = "llama_index-0.10.38.tar.gz", hash = "sha256:5281cfa8b6e7f0f5f12897c00adcd790f7b51c130037f3561fd5630fca37bfb3"}, -] - -[package.dependencies] -llama-index-agent-openai = ">=0.1.4,<0.3.0" -llama-index-cli = ">=0.1.2,<0.2.0" -llama-index-core = ">=0.10.38,<0.11.0" -llama-index-embeddings-openai = ">=0.1.5,<0.2.0" -llama-index-indices-managed-llama-cloud = ">=0.1.2,<0.2.0" -llama-index-legacy = ">=0.9.48,<0.10.0" -llama-index-llms-openai = ">=0.1.13,<0.2.0" -llama-index-multi-modal-llms-openai = ">=0.1.3,<0.2.0" -llama-index-program-openai = ">=0.1.3,<0.2.0" -llama-index-question-gen-openai = ">=0.1.2,<0.2.0" -llama-index-readers-file = ">=0.1.4,<0.2.0" -llama-index-readers-llama-parse = ">=0.1.2,<0.2.0" - -[[package]] -name = "llama-index-agent-openai" -version = "0.2.5" -description = "llama-index agent openai integration" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_agent_openai-0.2.5-py3-none-any.whl", hash = 
"sha256:67536bb104b24734f79324207034d948a2ca7e4cc20dd60cf05d6eeb4b12a586"}, - {file = "llama_index_agent_openai-0.2.5.tar.gz", hash = "sha256:45f4cc670d037a8a67f541d3a4d095f7f61caff6ed2c25702441eb1116d4b495"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.35,<0.11.0" -llama-index-llms-openai = ">=0.1.5,<0.2.0" -openai = ">=1.14.0" - -[[package]] -name = "llama-index-cli" -version = "0.1.12" -description = "llama-index cli" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_cli-0.1.12-py3-none-any.whl", hash = "sha256:d80d546786f02d3f16f6183b8e86b22b8b5c33a1500923659f2ccbff8d5df634"}, - {file = "llama_index_cli-0.1.12.tar.gz", hash = "sha256:3cf1f706c3c69c6b1aab07fca7faad3959db1709808efd50491b669d38b0b580"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.11.post1,<0.11.0" -llama-index-embeddings-openai = ">=0.1.1,<0.2.0" -llama-index-llms-openai = ">=0.1.1,<0.2.0" - -[[package]] -name = "llama-index-core" -version = "0.10.38.post2" -description = "Interface between LLMs and your data" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_core-0.10.38.post2-py3-none-any.whl", hash = "sha256:b4b55449bac458d339e84d8d26f322b4dc9f36d3682ebb41fccf5594c295620f"}, - {file = "llama_index_core-0.10.38.post2.tar.gz", hash = "sha256:9eff6e16e9045deca9cb58bcf2a4b9ba39d0da12d7493e6aebaa5badd3b3ebb5"}, -] - -[package.dependencies] -aiohttp = ">=3.8.6,<4.0.0" -dataclasses-json = "*" -deprecated = ">=1.2.9.3" -dirtyjson = ">=1.0.8,<2.0.0" -fsspec = ">=2023.5.0" -httpx = "*" -llamaindex-py-client = ">=0.1.18,<0.2.0" -nest-asyncio = ">=1.5.8,<2.0.0" -networkx = ">=3.0" -nltk = ">=3.8.1,<4.0.0" -numpy = "*" -openai = ">=1.1.0" -pandas = "*" -pillow = ">=9.0.0" -PyYAML = ">=6.0.1" -requests = ">=2.31.0" -SQLAlchemy = {version = ">=1.4.49", extras = ["asyncio"]} -tenacity = ">=8.2.0,<9.0.0" -tiktoken = ">=0.3.3" -tqdm = ">=4.66.1,<5.0.0" -typing-extensions = ">=4.5.0" -typing-inspect = ">=0.8.0" 
-wrapt = "*" - -[[package]] -name = "llama-index-embeddings-openai" -version = "0.1.10" -description = "llama-index embeddings openai integration" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_embeddings_openai-0.1.10-py3-none-any.whl", hash = "sha256:c3cfa83b537ded34d035fc172a945dd444c87fb58a89b02dfbf785b675f9f681"}, - {file = "llama_index_embeddings_openai-0.1.10.tar.gz", hash = "sha256:1bc1fc9b46773a12870c5d3097d3735d7ca33805f12462a8e35ae8a6e5ce1cf6"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.1,<0.11.0" - -[[package]] -name = "llama-index-indices-managed-llama-cloud" -version = "0.1.6" -description = "llama-index indices llama-cloud integration" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_indices_managed_llama_cloud-0.1.6-py3-none-any.whl", hash = "sha256:cba33e1a3677b2a2ae7f239119acbf6dc3818f105edc92315729842b56fbc949"}, - {file = "llama_index_indices_managed_llama_cloud-0.1.6.tar.gz", hash = "sha256:74b3b0e9ebf9d348d3054f9fc0c657031acceb9351c31116ad8d5a7ae4729f5c"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.0,<0.11.0" -llamaindex-py-client = ">=0.1.19,<0.2.0" - -[[package]] -name = "llama-index-legacy" -version = "0.9.48" -description = "Interface between LLMs and your data" -optional = false -python-versions = ">=3.8.1,<4.0" -files = [ - {file = "llama_index_legacy-0.9.48-py3-none-any.whl", hash = "sha256:714ada95beac179b4acefa4d2deff74bb7b2f22b0f699ac247d4cb67738d16d4"}, - {file = "llama_index_legacy-0.9.48.tar.gz", hash = "sha256:82ddc4691edbf49533d65582c249ba22c03fe96fbd3e92f7758dccef28e43834"}, -] - -[package.dependencies] -aiohttp = ">=3.8.6,<4.0.0" -dataclasses-json = "*" -deprecated = ">=1.2.9.3" -dirtyjson = ">=1.0.8,<2.0.0" -fsspec = ">=2023.5.0" -httpx = "*" -nest-asyncio = ">=1.5.8,<2.0.0" -networkx = ">=3.0" -nltk = ">=3.8.1,<4.0.0" -numpy = "*" -openai = ">=1.1.0" -pandas = "*" -requests = ">=2.31.0" -SQLAlchemy = {version = 
">=1.4.49", extras = ["asyncio"]} -tenacity = ">=8.2.0,<9.0.0" -tiktoken = ">=0.3.3" -typing-extensions = ">=4.5.0" -typing-inspect = ">=0.8.0" - -[package.extras] -gradientai = ["gradientai (>=1.4.0)"] -html = ["beautifulsoup4 (>=4.12.2,<5.0.0)"] -langchain = ["langchain (>=0.0.303)"] -local-models = ["optimum[onnxruntime] (>=1.13.2,<2.0.0)", "sentencepiece (>=0.1.99,<0.2.0)", "transformers[torch] (>=4.33.1,<5.0.0)"] -postgres = ["asyncpg (>=0.28.0,<0.29.0)", "pgvector (>=0.1.0,<0.2.0)", "psycopg2-binary (>=2.9.9,<3.0.0)"] -query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "lm-format-enforcer (>=0.4.3,<0.5.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "scikit-learn", "spacy (>=3.7.1,<4.0.0)"] - -[[package]] -name = "llama-index-llms-openai" -version = "0.1.20" -description = "llama-index llms openai integration" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_llms_openai-0.1.20-py3-none-any.whl", hash = "sha256:f27401acdf9f65bf4d866a100615dcbd81987b890ae5fa9c513d544ba6d711e7"}, - {file = "llama_index_llms_openai-0.1.20.tar.gz", hash = "sha256:0282e4e252893487afd72383b46da5b28ddcd3fb73bace1caefce8a36e9cf492"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.24,<0.11.0" - -[[package]] -name = "llama-index-multi-modal-llms-openai" -version = "0.1.6" -description = "llama-index multi-modal-llms openai integration" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_multi_modal_llms_openai-0.1.6-py3-none-any.whl", hash = "sha256:0b6950a6cf98d16ade7d3b9dd0821ecfe457ca103819ae6c3e66cfc9634ca646"}, - {file = "llama_index_multi_modal_llms_openai-0.1.6.tar.gz", hash = "sha256:10de75a877a444af35306385faad9b9f0624391e55309970564114a080a0578c"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.1,<0.11.0" -llama-index-llms-openai = ">=0.1.1,<0.2.0" - -[[package]] -name = "llama-index-program-openai" -version = "0.1.6" -description = "llama-index program openai integration" 
-optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_program_openai-0.1.6-py3-none-any.whl", hash = "sha256:4660b338503537c5edca1e0dab606af6ce372b4f1b597e2833c6b602447c5d8d"}, - {file = "llama_index_program_openai-0.1.6.tar.gz", hash = "sha256:c6a4980c5ea826088b28b4dee3367edb20221e6d05eb0e05019049190131d772"}, -] - -[package.dependencies] -llama-index-agent-openai = ">=0.1.1,<0.3.0" -llama-index-core = ">=0.10.1,<0.11.0" -llama-index-llms-openai = ">=0.1.1,<0.2.0" - -[[package]] -name = "llama-index-question-gen-openai" -version = "0.1.3" -description = "llama-index question_gen openai integration" -optional = false -python-versions = ">=3.8.1,<4.0" -files = [ - {file = "llama_index_question_gen_openai-0.1.3-py3-none-any.whl", hash = "sha256:1f83b49e8b2e665030d1ec8c54687d6985d9fa8426147b64e46628a9e489b302"}, - {file = "llama_index_question_gen_openai-0.1.3.tar.gz", hash = "sha256:4486198117a45457d2e036ae60b93af58052893cc7d78fa9b6f47dd47b81e2e1"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.1,<0.11.0" -llama-index-llms-openai = ">=0.1.1,<0.2.0" -llama-index-program-openai = ">=0.1.1,<0.2.0" - -[[package]] -name = "llama-index-readers-file" -version = "0.1.23" -description = "llama-index readers file integration" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_readers_file-0.1.23-py3-none-any.whl", hash = "sha256:32450d0a3edc6ef6af575f814beec39cd3a3351eaf0e3c97045bdd72a7a7b38d"}, - {file = "llama_index_readers_file-0.1.23.tar.gz", hash = "sha256:fde8ecb588e703849e51dc0f075f56d1f5db3bc1479dd00c21b42e93b81b6267"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.12.3,<5.0.0" -llama-index-core = ">=0.10.37.post1,<0.11.0" -pypdf = ">=4.0.1,<5.0.0" -striprtf = ">=0.0.26,<0.0.27" - -[package.extras] -pymupdf = ["pymupdf (>=1.23.21,<2.0.0)"] - -[[package]] -name = "llama-index-readers-llama-parse" -version = "0.1.4" -description = "llama-index readers llama-parse integration" -optional = 
false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_index_readers_llama_parse-0.1.4-py3-none-any.whl", hash = "sha256:c4914b37d12cceee56fbd185cca80f87d60acbf8ea7a73f9719610180be1fcdd"}, - {file = "llama_index_readers_llama_parse-0.1.4.tar.gz", hash = "sha256:78608b193c818894aefeee0aa303f02b7f80f2e4caf13866c2fd3b0b1023e2c0"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.7,<0.11.0" -llama-parse = ">=0.4.0,<0.5.0" - -[[package]] -name = "llama-parse" -version = "0.4.3" -description = "Parse files into RAG-Optimized formats." -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "llama_parse-0.4.3-py3-none-any.whl", hash = "sha256:c48c53a3080daeede293df620dddb1f381e084c31ee2dd44dce3f8615df723e8"}, - {file = "llama_parse-0.4.3.tar.gz", hash = "sha256:01836147b5238873b24a7dd41c5ab942b01b09b92d75570f30cf2861c084a0eb"}, -] - -[package.dependencies] -llama-index-core = ">=0.10.29" - -[[package]] -name = "llamaindex-py-client" -version = "0.1.19" -description = "" -optional = false -python-versions = "<4,>=3.8" -files = [ - {file = "llamaindex_py_client-0.1.19-py3-none-any.whl", hash = "sha256:fd9416fd78b97209bf323bc3c7fab314499778563e7274f10853ad560563d10e"}, - {file = "llamaindex_py_client-0.1.19.tar.gz", hash = "sha256:73f74792bb8c092bae6dc626627a09ac13a099fa8d10f8fcc83e17a2b332cca7"}, -] - -[package.dependencies] -httpx = ">=0.20.0" -pydantic = ">=1.10" - [[package]] name = "locust" version = "2.28.0" @@ -5554,18 +5286,15 @@ test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.0" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = 
"nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, + {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "numexpr" version = "2.10.0" @@ -5854,13 +5583,13 @@ sympy = "*" [[package]] name = "openai" -version = "1.30.2" +version = "1.30.5" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.30.2-py3-none-any.whl", hash = "sha256:44316818fbff3845278e862a655c4c041e93d907b04eff64629c2835f29bd58e"}, - {file = "openai-1.30.2.tar.gz", hash = "sha256:f86780f40505de60fa389993d9b7f5564f20acfbe5efcabd5c853a12453af2b0"}, + {file = "openai-1.30.5-py3-none-any.whl", hash = "sha256:2ad95e926de0d2e09cde632a9204b0a6dca4a03c2cdcc84329b01f355784355a"}, + {file = "openai-1.30.5.tar.gz", hash = "sha256:5366562eb2c5917e6116ae0391b7ae6e3acd62b0ae3f565ada32b35d8fcfa106"}, ] [package.dependencies] @@ -5877,42 +5606,42 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] [[package]] name = "opentelemetry-api" -version = "1.24.0" +version = "1.25.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_api-1.24.0-py3-none-any.whl", hash = "sha256:0f2c363d98d10d1ce93330015ca7fd3a65f60be64e05e30f557c61de52c80ca2"}, - {file = "opentelemetry_api-1.24.0.tar.gz", hash = "sha256:42719f10ce7b5a9a73b10a4baf620574fb8ad495a9cbe5c18d76b75d8689c67e"}, + {file = "opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737"}, + {file = "opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869"}, ] 
[package.dependencies] deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<=7.0" +importlib-metadata = ">=6.0,<=7.1" [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.24.0" +version = "1.25.0" description = "OpenTelemetry Protobuf encoding" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.24.0-py3-none-any.whl", hash = "sha256:e51f2c9735054d598ad2df5d3eca830fecfb5b0bda0a2fa742c9c7718e12f641"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.24.0.tar.gz", hash = "sha256:5d31fa1ff976cacc38be1ec4e3279a3f88435c75b38b1f7a099a1faffc302461"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl", hash = "sha256:15637b7d580c2675f70246563363775b4e6de947871e01d0f4e3881d1848d693"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.25.0.tar.gz", hash = "sha256:c93f4e30da4eee02bacd1e004eb82ce4da143a2f8e15b987a9f603e0a85407d3"}, ] [package.dependencies] -opentelemetry-proto = "1.24.0" +opentelemetry-proto = "1.25.0" [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.24.0" +version = "1.25.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0-py3-none-any.whl", hash = "sha256:f40d62aa30a0a43cc1657428e59fcf82ad5f7ea8fff75de0f9d9cb6f739e0a3b"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0.tar.gz", hash = "sha256:217c6e30634f2c9797999ea9da29f7300479a94a610139b9df17433f915e7baa"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0-py3-none-any.whl", hash = "sha256:3131028f0c0a155a64c430ca600fd658e8e37043cb13209f0109db5c1a3e4eb4"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0.tar.gz", hash = "sha256:c0b1661415acec5af87625587efa1ccab68b873745ca0ee96b69bb1042087eac"}, ] [package.dependencies] @@ -5920,22 +5649,19 @@ deprecated = ">=1.2.6" googleapis-common-protos = ">=1.52,<2.0" grpcio = 
">=1.0.0,<2.0.0" opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.24.0" -opentelemetry-proto = "1.24.0" -opentelemetry-sdk = ">=1.24.0,<1.25.0" - -[package.extras] -test = ["pytest-grpc"] +opentelemetry-exporter-otlp-proto-common = "1.25.0" +opentelemetry-proto = "1.25.0" +opentelemetry-sdk = ">=1.25.0,<1.26.0" [[package]] name = "opentelemetry-instrumentation" -version = "0.45b0" +version = "0.46b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation-0.45b0-py3-none-any.whl", hash = "sha256:06c02e2c952c1b076e8eaedf1b82f715e2937ba7eeacab55913dd434fbcec258"}, - {file = "opentelemetry_instrumentation-0.45b0.tar.gz", hash = "sha256:6c47120a7970bbeb458e6a73686ee9ba84b106329a79e4a4a66761f933709c7e"}, + {file = "opentelemetry_instrumentation-0.46b0-py3-none-any.whl", hash = "sha256:89cd721b9c18c014ca848ccd11181e6b3fd3f6c7669e35d59c48dc527408c18b"}, + {file = "opentelemetry_instrumentation-0.46b0.tar.gz", hash = "sha256:974e0888fb2a1e01c38fbacc9483d024bb1132aad92d6d24e2e5543887a7adda"}, ] [package.dependencies] @@ -5945,55 +5671,55 @@ wrapt = ">=1.0.0,<2.0.0" [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.45b0" +version = "0.46b0" description = "ASGI instrumentation for OpenTelemetry" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_asgi-0.45b0-py3-none-any.whl", hash = "sha256:8be1157ed62f0db24e45fdf7933c530c4338bd025c5d4af7830e903c0756021b"}, - {file = "opentelemetry_instrumentation_asgi-0.45b0.tar.gz", hash = "sha256:97f55620f163fd3d20323e9fd8dc3aacc826c03397213ff36b877e0f4b6b08a6"}, + {file = "opentelemetry_instrumentation_asgi-0.46b0-py3-none-any.whl", hash = "sha256:f13c55c852689573057837a9500aeeffc010c4ba59933c322e8f866573374759"}, + {file = "opentelemetry_instrumentation_asgi-0.46b0.tar.gz", hash = 
"sha256:02559f30cf4b7e2a737ab17eb52aa0779bcf4cc06573064f3e2cb4dcc7d3040a"}, ] [package.dependencies] asgiref = ">=3.0,<4.0" opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.45b0" -opentelemetry-semantic-conventions = "0.45b0" -opentelemetry-util-http = "0.45b0" +opentelemetry-instrumentation = "0.46b0" +opentelemetry-semantic-conventions = "0.46b0" +opentelemetry-util-http = "0.46b0" [package.extras] instruments = ["asgiref (>=3.0,<4.0)"] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.45b0" +version = "0.46b0" description = "OpenTelemetry FastAPI Instrumentation" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_fastapi-0.45b0-py3-none-any.whl", hash = "sha256:77d9c123a363129148f5f66d44094f3d67aaaa2b201396d94782b4a7f9ce4314"}, - {file = "opentelemetry_instrumentation_fastapi-0.45b0.tar.gz", hash = "sha256:5a6b91e1c08a01601845fcfcfdefd0a2aecdb3c356d4a436a3210cb58c21487e"}, + {file = "opentelemetry_instrumentation_fastapi-0.46b0-py3-none-any.whl", hash = "sha256:e0f5d150c6c36833dd011f0e6ef5ede6d7406c1aed0c7c98b2d3b38a018d1b33"}, + {file = "opentelemetry_instrumentation_fastapi-0.46b0.tar.gz", hash = "sha256:928a883a36fc89f9702f15edce43d1a7104da93d740281e32d50ffd03dbb4365"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.45b0" -opentelemetry-instrumentation-asgi = "0.45b0" -opentelemetry-semantic-conventions = "0.45b0" -opentelemetry-util-http = "0.45b0" +opentelemetry-instrumentation = "0.46b0" +opentelemetry-instrumentation-asgi = "0.46b0" +opentelemetry-semantic-conventions = "0.46b0" +opentelemetry-util-http = "0.46b0" [package.extras] instruments = ["fastapi (>=0.58,<1.0)"] [[package]] name = "opentelemetry-proto" -version = "1.24.0" +version = "1.25.0" description = "OpenTelemetry Python Proto" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_proto-1.24.0-py3-none-any.whl", hash = 
"sha256:bcb80e1e78a003040db71ccf83f2ad2019273d1e0828089d183b18a1476527ce"}, - {file = "opentelemetry_proto-1.24.0.tar.gz", hash = "sha256:ff551b8ad63c6cabb1845ce217a6709358dfaba0f75ea1fa21a61ceddc78cab8"}, + {file = "opentelemetry_proto-1.25.0-py3-none-any.whl", hash = "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f"}, + {file = "opentelemetry_proto-1.25.0.tar.gz", hash = "sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3"}, ] [package.dependencies] @@ -6001,40 +5727,43 @@ protobuf = ">=3.19,<5.0" [[package]] name = "opentelemetry-sdk" -version = "1.24.0" +version = "1.25.0" description = "OpenTelemetry Python SDK" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_sdk-1.24.0-py3-none-any.whl", hash = "sha256:fa731e24efe832e98bcd90902085b359dcfef7d9c9c00eb5b9a18587dae3eb59"}, - {file = "opentelemetry_sdk-1.24.0.tar.gz", hash = "sha256:75bc0563affffa827700e0f4f4a68e1e257db0df13372344aebc6f8a64cde2e5"}, + {file = "opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9"}, + {file = "opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7"}, ] [package.dependencies] -opentelemetry-api = "1.24.0" -opentelemetry-semantic-conventions = "0.45b0" +opentelemetry-api = "1.25.0" +opentelemetry-semantic-conventions = "0.46b0" typing-extensions = ">=3.7.4" [[package]] name = "opentelemetry-semantic-conventions" -version = "0.45b0" +version = "0.46b0" description = "OpenTelemetry Semantic Conventions" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_semantic_conventions-0.45b0-py3-none-any.whl", hash = "sha256:a4a6fb9a7bacd9167c082aa4681009e9acdbfa28ffb2387af50c2fef3d30c864"}, - {file = "opentelemetry_semantic_conventions-0.45b0.tar.gz", hash = "sha256:7c84215a44ac846bc4b8e32d5e78935c5c43482e491812a0bb8aaf87e4d92118"}, + {file = 
"opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = "sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07"}, + {file = "opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = "sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa"}, ] +[package.dependencies] +opentelemetry-api = "1.25.0" + [[package]] name = "opentelemetry-util-http" -version = "0.45b0" +version = "0.46b0" description = "Web util for OpenTelemetry" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_util_http-0.45b0-py3-none-any.whl", hash = "sha256:6628868b501b3004e1860f976f410eeb3d3499e009719d818000f24ce17b6e33"}, - {file = "opentelemetry_util_http-0.45b0.tar.gz", hash = "sha256:4ce08b6a7d52dd7c96b7705b5b4f06fdb6aa3eac1233b3b0bfef8a0cab9a92cd"}, + {file = "opentelemetry_util_http-0.46b0-py3-none-any.whl", hash = "sha256:8dc1949ce63caef08db84ae977fdc1848fe6dc38e6bbaad0ae3e6ecd0d451629"}, + {file = "opentelemetry_util_http-0.46b0.tar.gz", hash = "sha256:03b6e222642f9c7eae58d9132343e045b50aca9761fcb53709bd2b663571fdf6"}, ] [[package]] @@ -6525,13 +6254,13 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.45" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.45-py3-none-any.whl", hash = "sha256:a29b89160e494e3ea8622b09fa5897610b437884dcdcd054fdc1308883326c2a"}, + {file = "prompt_toolkit-3.0.45.tar.gz", hash = "sha256:07c60ee4ab7b7e90824b61afa840c8f5aad2d46b3e2e10acc33d8ecc94a49089"}, ] [package.dependencies] @@ -6900,13 +6629,13 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pyautogen" -version = 
"0.2.27" +version = "0.2.28" description = "Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework" optional = false python-versions = "<3.13,>=3.8" files = [ - {file = "pyautogen-0.2.27-py3-none-any.whl", hash = "sha256:9eb5c38544a0f79475c43442f9c5af2623165e32a7b9dd24ec141492f603a630"}, - {file = "pyautogen-0.2.27.tar.gz", hash = "sha256:a8939d14fed1893109738a4c34ce490bfc6d869fd8a4ecb22932b86c81d9a5a5"}, + {file = "pyautogen-0.2.28-py3-none-any.whl", hash = "sha256:69dffa4053096f496a50c8a252bbe23105b58fd6ffbb422fa8c043ecf3fc732b"}, + {file = "pyautogen-0.2.28.tar.gz", hash = "sha256:f74686a981f2b6046a9cf6aff5a5e61615ec60d5559a49e7474467fbdf4e077b"}, ] [package.dependencies] @@ -6915,6 +6644,7 @@ docker = "*" flaml = "*" numpy = ">=1.17.0,<2" openai = ">=1.3" +packaging = "*" pydantic = ">=1.10,<2.6.0 || >2.6.0,<3" python-dotenv = "*" termcolor = "*" @@ -6928,11 +6658,12 @@ gemini = ["google-generativeai (>=0.5,<1)", "pillow", "pydantic"] graph = ["matplotlib", "networkx"] jupyter-executor = ["ipykernel (>=6.29.0)", "jupyter-client (>=8.6.0)", "jupyter-kernel-gateway", "requests", "websocket-client"] lmm = ["pillow", "replicate"] +long-context = ["llmlingua (<0.3)"] mathchat = ["pydantic (==1.10.9)", "sympy", "wolframalpha"] redis = ["redis"] -retrievechat = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pypdf", "sentence-transformers"] -retrievechat-pgvector = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pgvector (>=0.2.5)", "psycopg (>=3.1.18)", "pypdf", "sentence-transformers"] -retrievechat-qdrant = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pypdf", "qdrant-client[fastembed]", "sentence-transformers"] +retrievechat = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "protobuf (==4.25.3)", "pypdf", "sentence-transformers"] +retrievechat-pgvector = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pgvector (>=0.2.5)", "protobuf (==4.25.3)", "psycopg (>=3.1.18)", "pypdf", 
"sentence-transformers"] +retrievechat-qdrant = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "protobuf (==4.25.3)", "pypdf", "qdrant-client[fastembed]", "sentence-transformers"] teachable = ["chromadb"] test = ["ipykernel", "nbconvert", "nbformat", "pandas", "pre-commit", "pytest (>=6.1.1,<8)", "pytest-asyncio", "pytest-cov (>=5)"] types = ["ipykernel (>=6.29.0)", "jupyter-client (>=8.6.0)", "jupyter-kernel-gateway", "mypy (==1.9.0)", "pytest (>=6.1.1,<8)", "requests", "websocket-client"] @@ -6993,18 +6724,18 @@ files = [ [[package]] name = "pydantic" -version = "2.7.1" +version = "2.7.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, - {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, + {file = "pydantic-2.7.2-py3-none-any.whl", hash = "sha256:834ab954175f94e6e68258537dc49402c4a5e9d0409b9f1b86b7e934a8372de7"}, + {file = "pydantic-2.7.2.tar.gz", hash = "sha256:71b2945998f9c9b7919a45bde9a50397b289937d215ae141c1d0903ba7149fd7"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.2" +pydantic-core = "2.18.3" typing-extensions = ">=4.6.1" [package.extras] @@ -7012,90 +6743,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.2" +version = "2.18.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, - {file = 
"pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, - {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, - {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, - {file = 
"pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, - {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, - {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, - {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, - {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, - {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, - {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, - {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, - {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, - 
{file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, - {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, - {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, - {file = 
"pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, - {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:744697428fcdec6be5670460b578161d1ffe34743a5c15656be7ea82b008197c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37b40c05ced1ba4218b14986fe6f283d22e1ae2ff4c8e28881a70fb81fbfcda7"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a9a75622357076efb6b311983ff190fbfb3c12fc3a853122b34d3d358126c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2e253af04ceaebde8eb201eb3f3e3e7e390f2d275a88300d6a1959d710539e2"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:855ec66589c68aa367d989da5c4755bb74ee92ccad4fdb6af942c3612c067e34"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d3e42bb54e7e9d72c13ce112e02eb1b3b55681ee948d748842171201a03a98a"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6ac9ffccc9d2e69d9fba841441d4259cb668ac180e51b30d3632cd7abca2b9b"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:c56eca1686539fa0c9bda992e7bd6a37583f20083c37590413381acfc5f192d6"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17954d784bf8abfc0ec2a633108207ebc4fa2df1a0e4c0c3ccbaa9bb01d2c426"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:98ed737567d8f2ecd54f7c8d4f8572ca7c7921ede93a2e52939416170d357812"}, + {file = "pydantic_core-2.18.3-cp310-none-win32.whl", hash = "sha256:9f9e04afebd3ed8c15d67a564ed0a34b54e52136c6d40d14c5547b238390e779"}, + {file = "pydantic_core-2.18.3-cp310-none-win_amd64.whl", hash = "sha256:45e4ffbae34f7ae30d0047697e724e534a7ec0a82ef9994b7913a412c21462a0"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b9ebe8231726c49518b16b237b9fe0d7d361dd221302af511a83d4ada01183ab"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b8e20e15d18bf7dbb453be78a2d858f946f5cdf06c5072453dace00ab652e2b2"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d9ff283cd3459fa0bf9b0256a2b6f01ac1ff9ffb034e24457b9035f75587cb"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f7ef5f0ebb77ba24c9970da18b771711edc5feaf00c10b18461e0f5f5949231"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73038d66614d2e5cde30435b5afdced2b473b4c77d4ca3a8624dd3e41a9c19be"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6afd5c867a74c4d314c557b5ea9520183fadfbd1df4c2d6e09fd0d990ce412cd"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd7df92f28d351bb9f12470f4c533cf03d1b52ec5a6e5c58c65b183055a60106"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:80aea0ffeb1049336043d07799eace1c9602519fb3192916ff525b0287b2b1e4"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aaee40f25bba38132e655ffa3d1998a6d576ba7cf81deff8bfa189fb43fd2bbe"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9128089da8f4fe73f7a91973895ebf2502539d627891a14034e45fb9e707e26d"}, + {file = "pydantic_core-2.18.3-cp311-none-win32.whl", hash = "sha256:fec02527e1e03257aa25b1a4dcbe697b40a22f1229f5d026503e8b7ff6d2eda7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_amd64.whl", hash = "sha256:58ff8631dbab6c7c982e6425da8347108449321f61fe427c52ddfadd66642af7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_arm64.whl", hash = "sha256:3fc1c7f67f34c6c2ef9c213e0f2a351797cda98249d9ca56a70ce4ebcaba45f4"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f0928cde2ae416a2d1ebe6dee324709c6f73e93494d8c7aea92df99aab1fc40f"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bee9bb305a562f8b9271855afb6ce00223f545de3d68560b3c1649c7c5295e9"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e862823be114387257dacbfa7d78547165a85d7add33b446ca4f4fae92c7ff5c"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a36f78674cbddc165abab0df961b5f96b14461d05feec5e1f78da58808b97e7"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba905d184f62e7ddbb7a5a751d8a5c805463511c7b08d1aca4a3e8c11f2e5048"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fdd362f6a586e681ff86550b2379e532fee63c52def1c666887956748eaa326"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b214b7ee3bd3b865e963dbed0f8bc5375f49449d70e8d407b567af3222aae4"}, + 
{file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:691018785779766127f531674fa82bb368df5b36b461622b12e176c18e119022"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:60e4c625e6f7155d7d0dcac151edf5858102bc61bf959d04469ca6ee4e8381bd"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4e651e47d981c1b701dcc74ab8fec5a60a5b004650416b4abbef13db23bc7be"}, + {file = "pydantic_core-2.18.3-cp312-none-win32.whl", hash = "sha256:ffecbb5edb7f5ffae13599aec33b735e9e4c7676ca1633c60f2c606beb17efc5"}, + {file = "pydantic_core-2.18.3-cp312-none-win_amd64.whl", hash = "sha256:2c8333f6e934733483c7eddffdb094c143b9463d2af7e6bd85ebcb2d4a1b82c6"}, + {file = "pydantic_core-2.18.3-cp312-none-win_arm64.whl", hash = "sha256:7a20dded653e516a4655f4c98e97ccafb13753987434fe7cf044aa25f5b7d417"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:eecf63195be644b0396f972c82598cd15693550f0ff236dcf7ab92e2eb6d3522"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c44efdd3b6125419c28821590d7ec891c9cb0dff33a7a78d9d5c8b6f66b9702"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e59fca51ffbdd1638b3856779342ed69bcecb8484c1d4b8bdb237d0eb5a45e2"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70cf099197d6b98953468461d753563b28e73cf1eade2ffe069675d2657ed1d5"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63081a49dddc6124754b32a3774331467bfc3d2bd5ff8f10df36a95602560361"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:370059b7883485c9edb9655355ff46d912f4b03b009d929220d9294c7fd9fd60"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5a64faeedfd8254f05f5cf6fc755023a7e1606af3959cfc1a9285744cc711044"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19d2e725de0f90d8671f89e420d36c3dd97639b98145e42fcc0e1f6d492a46dc"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:67bc078025d70ec5aefe6200ef094576c9d86bd36982df1301c758a9fff7d7f4"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf952c3f4100e203cbaf8e0c907c835d3e28f9041474e52b651761dc248a3c0"}, + {file = "pydantic_core-2.18.3-cp38-none-win32.whl", hash = "sha256:9a46795b1f3beb167eaee91736d5d17ac3a994bf2215a996aed825a45f897558"}, + {file = "pydantic_core-2.18.3-cp38-none-win_amd64.whl", hash = "sha256:200ad4e3133cb99ed82342a101a5abf3d924722e71cd581cc113fe828f727fbc"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:304378b7bf92206036c8ddd83a2ba7b7d1a5b425acafff637172a3aa72ad7083"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c826870b277143e701c9ccf34ebc33ddb4d072612683a044e7cce2d52f6c3fef"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e201935d282707394f3668380e41ccf25b5794d1b131cdd96b07f615a33ca4b1"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5560dda746c44b48bf82b3d191d74fe8efc5686a9ef18e69bdabccbbb9ad9442"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b32c2a1f8032570842257e4c19288eba9a2bba4712af542327de9a1204faff8"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:929c24e9dea3990bc8bcd27c5f2d3916c0c86f5511d2caa69e0d5290115344a9"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a8376fef60790152564b0eab376b3e23dd6e54f29d84aad46f7b264ecca943"}, + 
{file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dccf3ef1400390ddd1fb55bf0632209d39140552d068ee5ac45553b556780e06"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41dbdcb0c7252b58fa931fec47937edb422c9cb22528f41cb8963665c372caf6"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:666e45cf071669fde468886654742fa10b0e74cd0fa0430a46ba6056b24fb0af"}, + {file = "pydantic_core-2.18.3-cp39-none-win32.whl", hash = "sha256:f9c08cabff68704a1b4667d33f534d544b8a07b8e5d039c37067fceb18789e78"}, + {file = "pydantic_core-2.18.3-cp39-none-win_amd64.whl", hash = "sha256:4afa5f5973e8572b5c0dcb4e2d4fda7890e7cd63329bd5cc3263a25c92ef0026"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:77319771a026f7c7d29c6ebc623de889e9563b7087911b46fd06c044a12aa5e9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:df11fa992e9f576473038510d66dd305bcd51d7dd508c163a8c8fe148454e059"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d531076bdfb65af593326ffd567e6ab3da145020dafb9187a1d131064a55f97c"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d33ce258e4e6e6038f2b9e8b8a631d17d017567db43483314993b3ca345dcbbb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f9cd7f5635b719939019be9bda47ecb56e165e51dd26c9a217a433e3d0d59a9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cd4a032bb65cc132cae1fe3e52877daecc2097965cd3914e44fbd12b00dae7c5"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f2718430098bcdf60402136c845e4126a189959d103900ebabb6774a5d9fdb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-win_amd64.whl", 
hash = "sha256:c0037a92cf0c580ed14e10953cdd26528e8796307bb8bb312dc65f71547df04d"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b95a0972fac2b1ff3c94629fc9081b16371dad870959f1408cc33b2f78ad347a"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a62e437d687cc148381bdd5f51e3e81f5b20a735c55f690c5be94e05da2b0d5c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b367a73a414bbb08507da102dc2cde0fa7afe57d09b3240ce82a16d608a7679c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ecce4b2360aa3f008da3327d652e74a0e743908eac306198b47e1c58b03dd2b"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4435b8d83f0c9561a2a9585b1de78f1abb17cb0cef5f39bf6a4b47d19bafe3"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:616221a6d473c5b9aa83fa8982745441f6a4a62a66436be9445c65f241b86c94"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7e6382ce89a92bc1d0c0c5edd51e931432202b9080dc921d8d003e616402efd1"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff58f379345603d940e461eae474b6bbb6dab66ed9a851ecd3cb3709bf4dcf6a"}, + {file = "pydantic_core-2.18.3.tar.gz", hash = "sha256:432e999088d85c8f36b9a3f769a8e2b57aabd817bbb729a90d1fe7f18f6f1f39"}, ] [package.dependencies] @@ -7250,6 +6981,16 @@ docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] image = ["Pillow (>=8.0.0)"] +[[package]] +name = "pyperclip" +version = "1.8.2" +description = "A cross-platform clipboard module for Python. 
(Only handles plain text for now.)" +optional = false +python-versions = "*" +files = [ + {file = "pyperclip-1.8.2.tar.gz", hash = "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57"}, +] + [[package]] name = "pypika" version = "0.48.9" @@ -7815,13 +7556,13 @@ langchain = ["langchain (>=0.0.321)"] [[package]] name = "realtime" -version = "1.0.4" +version = "1.0.5" description = "" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "realtime-1.0.4-py3-none-any.whl", hash = "sha256:b06bea001985f089167320bda1e91c6b2d866f56ca810bb8d768ee3cf695ee21"}, - {file = "realtime-1.0.4.tar.gz", hash = "sha256:a9095f60121a365e84656c582e6ccd8dc8b3a732ddddb2ccd26cc3d32b77bdf6"}, + {file = "realtime-1.0.5-py3-none-any.whl", hash = "sha256:93342fbcb8812ed8d81733f2782c1199376f0471e78014675420c7d31f2f327d"}, + {file = "realtime-1.0.5.tar.gz", hash = "sha256:4abbb3218b6ce8bd8d9d3b1112661d325e36ceab67a0e918673d0fd8fca04fb1"}, ] [package.dependencies] @@ -7937,13 +7678,13 @@ files = [ [[package]] name = "requests" -version = "2.32.2" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -8022,28 +7763,28 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.4.5" +version = "0.4.6" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.4.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8f58e615dec58b1a6b291769b559e12fdffb53cc4187160a2fc83250eaf54e96"}, - {file = "ruff-0.4.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:84dd157474e16e3a82745d2afa1016c17d27cb5d52b12e3d45d418bcc6d49264"}, - {file = "ruff-0.4.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f483ad9d50b00e7fd577f6d0305aa18494c6af139bce7319c68a17180087f4"}, - {file = "ruff-0.4.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:63fde3bf6f3ad4e990357af1d30e8ba2730860a954ea9282c95fc0846f5f64af"}, - {file = "ruff-0.4.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e3ba4620dee27f76bbcad97067766026c918ba0f2d035c2fc25cbdd04d9c97"}, - {file = "ruff-0.4.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:441dab55c568e38d02bbda68a926a3d0b54f5510095c9de7f95e47a39e0168aa"}, - {file = "ruff-0.4.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1169e47e9c4136c997f08f9857ae889d614c5035d87d38fda9b44b4338909cdf"}, - {file = "ruff-0.4.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:755ac9ac2598a941512fc36a9070a13c88d72ff874a9781493eb237ab02d75df"}, - {file = "ruff-0.4.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4b02a65985be2b34b170025a8b92449088ce61e33e69956ce4d316c0fe7cce0"}, - {file = "ruff-0.4.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:75a426506a183d9201e7e5664de3f6b414ad3850d7625764106f7b6d0486f0a1"}, - {file = "ruff-0.4.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6e1b139b45e2911419044237d90b60e472f57285950e1492c757dfc88259bb06"}, - {file = "ruff-0.4.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a6f29a8221d2e3d85ff0c7b4371c0e37b39c87732c969b4d90f3dad2e721c5b1"}, - {file = "ruff-0.4.5-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:d6ef817124d72b54cc923f3444828ba24fa45c3164bc9e8f1813db2f3d3a8a11"}, - {file = "ruff-0.4.5-py3-none-win32.whl", hash = "sha256:aed8166c18b1a169a5d3ec28a49b43340949e400665555b51ee06f22813ef062"}, - {file = "ruff-0.4.5-py3-none-win_amd64.whl", hash = "sha256:b0b03c619d2b4350b4a27e34fd2ac64d0dabe1afbf43de57d0f9d8a05ecffa45"}, - {file = "ruff-0.4.5-py3-none-win_arm64.whl", hash = "sha256:9d15de3425f53161b3f5a5658d4522e4eee5ea002bf2ac7aa380743dd9ad5fba"}, - {file = "ruff-0.4.5.tar.gz", hash = "sha256:286eabd47e7d4d521d199cab84deca135557e6d1e0f0d01c29e757c3cb151b54"}, + {file = "ruff-0.4.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ef995583a038cd4a7edf1422c9e19118e2511b8ba0b015861b4abd26ec5367c5"}, + {file = "ruff-0.4.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:602ebd7ad909eab6e7da65d3c091547781bb06f5f826974a53dbe563d357e53c"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f9ced5cbb7510fd7525448eeb204e0a22cabb6e99a3cb160272262817d49786"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04a80acfc862e0e1630c8b738e70dcca03f350bad9e106968a8108379e12b31f"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be47700ecb004dfa3fd4dcdddf7322d4e632de3c06cd05329d69c45c0280e618"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1ff930d6e05f444090a0139e4e13e1e2e1f02bd51bb4547734823c760c621e79"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f13410aabd3b5776f9c5699f42b37a3a348d65498c4310589bc6e5c548dc8a2f"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cf5cc02d3ae52dfb0c8a946eb7a1d6ffe4d91846ffc8ce388baa8f627e3bd50"}, + {file = "ruff-0.4.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea3424793c29906407e3cf417f28fc33f689dacbbadfb52b7e9a809dd535dcef"}, + {file 
= "ruff-0.4.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1fa8561489fadf483ffbb091ea94b9c39a00ed63efacd426aae2f197a45e67fc"}, + {file = "ruff-0.4.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d5b914818d8047270308fe3e85d9d7f4a31ec86c6475c9f418fbd1624d198e0"}, + {file = "ruff-0.4.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4f02284335c766678778475e7698b7ab83abaf2f9ff0554a07b6f28df3b5c259"}, + {file = "ruff-0.4.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3a6a0a4f4b5f54fff7c860010ab3dd81425445e37d35701a965c0248819dde7a"}, + {file = "ruff-0.4.6-py3-none-win32.whl", hash = "sha256:9018bf59b3aa8ad4fba2b1dc0299a6e4e60a4c3bc62bbeaea222679865453062"}, + {file = "ruff-0.4.6-py3-none-win_amd64.whl", hash = "sha256:a769ae07ac74ff1a019d6bd529426427c3e30d75bdf1e08bb3d46ac8f417326a"}, + {file = "ruff-0.4.6-py3-none-win_arm64.whl", hash = "sha256:735a16407a1a8f58e4c5b913ad6102722e80b562dd17acb88887685ff6f20cf6"}, + {file = "ruff-0.4.6.tar.gz", hash = "sha256:a797a87da50603f71e6d0765282098245aca6e3b94b7c17473115167d8dfb0b7"}, ] [[package]] @@ -8601,30 +8342,30 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] [[package]] name = "structlog" -version = "24.1.0" +version = "24.2.0" description = "Structured Logging for Python" optional = false python-versions = ">=3.8" files = [ - {file = "structlog-24.1.0-py3-none-any.whl", hash = "sha256:3f6efe7d25fab6e86f277713c218044669906537bb717c1807a09d46bca0714d"}, - {file = "structlog-24.1.0.tar.gz", hash = "sha256:41a09886e4d55df25bdcb9b5c9674bccfab723ff43e0a86a1b7b236be8e57b16"}, + {file = "structlog-24.2.0-py3-none-any.whl", hash = "sha256:983bd49f70725c5e1e3867096c0c09665918936b3db27341b41d294283d7a48a"}, + {file = "structlog-24.2.0.tar.gz", hash = "sha256:0e3fe74924a6d8857d3f612739efb94c72a7417d7c7c008d12276bca3b5bf13b"}, ] [package.extras] -dev = ["structlog[tests,typing]"] +dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", 
"rich", "simplejson", "twisted"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"] tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] typing = ["mypy (>=1.4)", "rich", "twisted"] [[package]] name = "supabase" -version = "2.4.6" +version = "2.5.0" description = "Supabase client for Python." optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "supabase-2.4.6-py3-none-any.whl", hash = "sha256:0bfd6bb33c0e6d6891b55caaf689140f47588b01436fecc336d1d75090c70e8b"}, - {file = "supabase-2.4.6.tar.gz", hash = "sha256:442be0729f5fd9258326ba89859f60bfd8d9218283ed7fd8a62ae81e2f310474"}, + {file = "supabase-2.5.0-py3-none-any.whl", hash = "sha256:13e5ed9e9377a1a69e70ad18ed7b82997cf13ffcd28173952f7503e4d5067771"}, + {file = "supabase-2.5.0.tar.gz", hash = "sha256:133dc832dfdd617f2f90ac5b52664df96ac8a9302ac6656ee769dc3f545812f0"}, ] [package.dependencies] @@ -8651,17 +8392,17 @@ httpx = ">=0.24,<0.28" [[package]] name = "sympy" -version = "1.12" +version = "1.12.1" description = "Computer algebra system (CAS) in Python" optional = false python-versions = ">=3.8" files = [ - {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, - {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, + {file = "sympy-1.12.1-py3-none-any.whl", hash = "sha256:9b2cbc7f1a640289430e13d2a56f02f867a1da0190f2f99d8968c2f74da0e515"}, + {file = "sympy-1.12.1.tar.gz", hash = "sha256:2877b03f998cd8c08f07cd0de5b767119cd3ef40d09f41c30d722f6686b0fb88"}, ] [package.dependencies] -mpmath = ">=0.19" +mpmath = ">=1.1.0,<1.4.0" [[package]] name = "tbb" @@ -9286,13 +9027,13 @@ urllib3 = ">=2" [[package]] name = "types-setuptools" -version = "70.0.0.20240523" +version = "70.0.0.20240524" description = "Typing stubs for setuptools" optional = false 
python-versions = ">=3.8" files = [ - {file = "types-setuptools-70.0.0.20240523.tar.gz", hash = "sha256:268c782f9d657bb0447a97bb9d50debd3a48721bb9d1d8194548d4835798beac"}, - {file = "types_setuptools-70.0.0.20240523-py3-none-any.whl", hash = "sha256:1828c1e2bc93cdb371fd0955fa51e27c3143490fe40a650db3fca22ea44233b3"}, + {file = "types-setuptools-70.0.0.20240524.tar.gz", hash = "sha256:e31fee7b9d15ef53980526579ac6089b3ae51a005a281acf97178e90ac71aff6"}, + {file = "types_setuptools-70.0.0.20240524-py3-none-any.whl", hash = "sha256:8f5379b9948682d72a9ab531fbe52932e84c4f38deda570255f9bae3edd766bc"}, ] [[package]] @@ -9419,6 +9160,35 @@ files = [ {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, ] +[[package]] +name = "uncurl" +version = "0.0.11" +description = "A library to convert curl requests to python-requests." +optional = false +python-versions = "*" +files = [ + {file = "uncurl-0.0.11-py3-none-any.whl", hash = "sha256:5961e93f07a5c9f2ef8ae4245bd92b0a6ce503c851de980f5b70080ae74cdc59"}, + {file = "uncurl-0.0.11.tar.gz", hash = "sha256:530c9bbd4d118f4cde6194165ff484cc25b0661cd256f19e9d5fcb53fc077790"}, +] + +[package.dependencies] +pyperclip = "*" +six = "*" + +[[package]] +name = "upstash-vector" +version = "0.4.0" +description = "Serverless Vector SDK from Upstash" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "upstash_vector-0.4.0-py3-none-any.whl", hash = "sha256:1ba11d8fc7d036bf93fde741b862c9e04ad962397dc600d1dc7546b63a84da82"}, + {file = "upstash_vector-0.4.0.tar.gz", hash = "sha256:a8ae11b2d3989c2615f1f06c66af39da763af7f7239b625fede621bf2fbb997d"}, +] + +[package.dependencies] +httpx = ">=0.24.0,<0.28" + [[package]] name = "uritemplate" version = "4.1.1" @@ -9570,88 +9340,102 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test 
= ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "vulture" +version = "2.11" +description = "Find dead code" +optional = false +python-versions = ">=3.8" +files = [ + {file = "vulture-2.11-py2.py3-none-any.whl", hash = "sha256:12d745f7710ffbf6aeb8279ba9068a24d4e52e8ed333b8b044035c9d6b823aba"}, + {file = "vulture-2.11.tar.gz", hash = "sha256:f0fbb60bce6511aad87ee0736c502456737490a82d919a44e6d92262cb35f1c2"}, +] + +[package.dependencies] +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "watchfiles" -version = "0.21.0" +version = "0.22.0" description = "Simple, modern and high performance file watching and code reload in python." optional = false python-versions = ">=3.8" files = [ - {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"}, - {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"}, - {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"}, - {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"}, - {file = 
"watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"}, - {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"}, - {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"}, - {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = "sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"}, - {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"}, - {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"}, - {file = "watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"}, - {file = 
"watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"}, - {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"}, - {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = "sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"}, - {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"}, - {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"}, - {file = 
"watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"}, - {file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"}, + {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"}, + {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"}, + {file = 
"watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"}, + {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"}, + {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"}, + {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"}, + {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"}, + {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = "sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"}, + {file = 
"watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"}, + {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"}, + {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"}, + {file = "watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d47e9ef1a94cc7a536039e46738e17cce058ac1593b2eccdede8bf72e45f372a"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28f393c1194b6eaadcdd8f941307fc9bbd7eb567995232c830f6aef38e8a6e88"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd64f3a4db121bc161644c9e10a9acdb836853155a108c2446db2f5ae1778c3d"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2abeb79209630da981f8ebca30a2c84b4c3516a214451bfc5f106723c5f45843"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4cc382083afba7918e32d5ef12321421ef43d685b9a67cc452a6e6e18920890e"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d048ad5d25b363ba1d19f92dcf29023988524bee6f9d952130b316c5802069cb"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:103622865599f8082f03af4214eaff90e2426edff5e8522c8f9e93dc17caee13"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e1f3cf81f1f823e7874ae563457828e940d75573c8fbf0ee66818c8b6a9099"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8597b6f9dc410bdafc8bb362dac1cbc9b4684a8310e16b1ff5eee8725d13dcd6"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b04a2cbc30e110303baa6d3ddce8ca3664bc3403be0f0ad513d1843a41c97d1"}, + {file = "watchfiles-0.22.0-cp38-none-win32.whl", hash = "sha256:b610fb5e27825b570554d01cec427b6620ce9bd21ff8ab775fc3a32f28bba63e"}, + {file = "watchfiles-0.22.0-cp38-none-win_amd64.whl", hash = "sha256:fe82d13461418ca5e5a808a9e40f79c1879351fcaeddbede094028e74d836e86"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3973145235a38f73c61474d56ad6199124e7488822f3a4fc97c72009751ae3b0"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:280a4afbc607cdfc9571b9904b03a478fc9f08bbeec382d648181c695648202f"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0d883351a34c01bd53cfa75cd0292e3f7e268bacf2f9e33af4ecede7e21d1d"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9165bcab15f2b6d90eedc5c20a7f8a03156b3773e5fb06a790b54ccecdb73385"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc1b9b56f051209be458b87edb6856a449ad3f803315d87b2da4c93b43a6fe72"}, + {file = 
"watchfiles-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc1fc25a1dedf2dd952909c8e5cb210791e5f2d9bc5e0e8ebc28dd42fed7562"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc92d2d2706d2b862ce0568b24987eba51e17e14b79a1abcd2edc39e48e743c8"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97b94e14b88409c58cdf4a8eaf0e67dfd3ece7e9ce7140ea6ff48b0407a593ec"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96eec15e5ea7c0b6eb5bfffe990fc7c6bd833acf7e26704eb18387fb2f5fd087"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:28324d6b28bcb8d7c1041648d7b63be07a16db5510bea923fc80b91a2a6cbed6"}, + {file = "watchfiles-0.22.0-cp39-none-win32.whl", hash = "sha256:8c3e3675e6e39dc59b8fe5c914a19d30029e36e9f99468dddffd432d8a7b1c93"}, + {file = "watchfiles-0.22.0-cp39-none-win_amd64.whl", hash = "sha256:25c817ff2a86bc3de3ed2df1703e3d24ce03479b27bb4527c57e722f8554d971"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"}, + {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"}, ] [package.dependencies] @@ -10175,18 +9959,18 @@ pydantic = ">=2.0.0" [[package]] name = "zipp" -version = "3.18.2" +version = "3.19.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.2-py3-none-any.whl", hash = "sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"}, - {file = "zipp-3.18.2.tar.gz", hash = "sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059"}, + {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, + {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, ] [package.extras] -docs = 
["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [[package]] name = "zope-event" @@ -10208,48 +9992,54 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "6.4.post1" +version = "6.4.post2" description = "Interfaces for Python" optional = false python-versions = ">=3.7" files = [ - {file = "zope.interface-6.4.post1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:16f73c42f10f761051157332943ee1f7cf973cc1c78a50d1960c313a211cca4a"}, - {file = "zope.interface-6.4.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ca89624d0eabc7ce4f299c6d621531cb8b0ebac3bb4f9ebf2d057477602e1b8"}, - {file = "zope.interface-6.4.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de6eaa0b7df493904d24050dcdc3db6589bd94f7e49caab57971fe47a669b3ea"}, - {file = "zope.interface-6.4.post1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a45a7990d143acc37faa905d4a528f5923a5dd30f46536977d8061d10a895b09"}, - {file = "zope.interface-6.4.post1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcefd4012593ee410ebf5728ee98f61b3401f0563c5068e760aa2b7720ca68a0"}, - {file = "zope.interface-6.4.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:36234d3b8211d053c42684666c2a04eb1a35e0cec6bc3e54586bb60fb0be3b17"}, - {file = "zope.interface-6.4.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c18218451823ca9b5131ceaacf655fe9dd4e592ebf848cb0a65fe8428bbf604"}, - {file = "zope.interface-6.4.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:563a7192a5baf8d9b189dc598c3555e695e00fdce3eafb88b30d6d3df986fcc5"}, - {file = "zope.interface-6.4.post1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd3ab863a4e7d888728c949ba052a649664dea156bdd7140eb9269bbe6e33205"}, - {file = "zope.interface-6.4.post1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52901ff6d75a4332457610cbd2883f39b386c5bebe0745ecf78e3fe22cfdd0d9"}, - {file = "zope.interface-6.4.post1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:799743a342249c9b9529abd1115a3f81754800e75dea254b58efdd2984009798"}, - {file = "zope.interface-6.4.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:15fa208d7a802c0dd3e9d4d5336619a37efd57f2d2ce830d9f9d5843a2b7daba"}, - {file = "zope.interface-6.4.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d91698257850ca5f523a5513a69775e6fb7c18129311e118996f8e9b463d11b0"}, - {file = "zope.interface-6.4.post1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45fb6fe8b5852564e63d6705a7904530a7c886056e6e9aaf938dc5e2bc637097"}, - {file = "zope.interface-6.4.post1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec57dec41c0c8b723dd70da1864d50908c689e1c9cf43f32e9b04c0992e5d93d"}, - {file = "zope.interface-6.4.post1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de6c1dad571276768fd6bc92999e8d942151552662a9048e3384cac05b148985"}, - {file = 
"zope.interface-6.4.post1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaa1f8967c3f272de80c4bec4b1379f97cd29006323f50558bd2f780a4f637ef"}, - {file = "zope.interface-6.4.post1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8140cf2665c5a07adc285bc859fdda67cfbd7edd62480dfca2211f4798502b54"}, - {file = "zope.interface-6.4.post1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:18f4061c3456c61557e9d7068e435f5db164b38f15f3d9bd995ff185c6db2c62"}, - {file = "zope.interface-6.4.post1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a430d2cc52aef2af0dc45866852730fbc93463cf8cdeb179e8ee04440e0955c4"}, - {file = "zope.interface-6.4.post1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:065f8ff0e034e43b8da05ffed308a9e3311720a2b13b83724f26a8dd6709964d"}, - {file = "zope.interface-6.4.post1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0940b3b44b6cc0375ea0da5fefee05a9abe8bb53594a3a6e4aafb9f99dc5de8d"}, - {file = "zope.interface-6.4.post1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d200aef16e577682dd54a79ff5f4f897a9807722b54bd8a9bca404679c609d"}, - {file = "zope.interface-6.4.post1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e0678885d07e865047e15be3ebe5c87903cc7f5ca5edfd0045d1c7b43f7fe9d"}, - {file = "zope.interface-6.4.post1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bf9fa875a9bae5318f24b0d9ab9e2c8a23bccad2979e9e4305eed8119bbe3195"}, - {file = "zope.interface-6.4.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68b13ac49becfaba5b77359559812daac0c5c4b3c0d43cdb293a2dec8db95c24"}, - {file = "zope.interface-6.4.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a00983c5c793b17b829020e11032dabab023c4e0ef12f134b90df802ae5adf2"}, - {file = 
"zope.interface-6.4.post1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ada5ac54ac7d34bb33423da40b7f3edfc54c6b9623ac9daac7f456dbf25173ba"}, - {file = "zope.interface-6.4.post1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297ee171f40f8f18665bb75f576df7d1ddce19f3e6696ef6acb930dcbfbf693f"}, - {file = "zope.interface-6.4.post1.tar.gz", hash = "sha256:e9961413091e3c9d5c3ed671757049cc6153280f39a154a0b633608efcfdec6b"}, + {file = "zope.interface-6.4.post2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2eccd5bef45883802848f821d940367c1d0ad588de71e5cabe3813175444202c"}, + {file = "zope.interface-6.4.post2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:762e616199f6319bb98e7f4f27d254c84c5fb1c25c908c2a9d0f92b92fb27530"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef8356f16b1a83609f7a992a6e33d792bb5eff2370712c9eaae0d02e1924341"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e4fa5d34d7973e6b0efa46fe4405090f3b406f64b6290facbb19dcbf642ad6b"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d22fce0b0f5715cdac082e35a9e735a1752dc8585f005d045abb1a7c20e197f9"}, + {file = "zope.interface-6.4.post2-cp310-cp310-win_amd64.whl", hash = "sha256:97e615eab34bd8477c3f34197a17ce08c648d38467489359cb9eb7394f1083f7"}, + {file = "zope.interface-6.4.post2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:599f3b07bde2627e163ce484d5497a54a0a8437779362395c6b25e68c6590ede"}, + {file = "zope.interface-6.4.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:136cacdde1a2c5e5bc3d0b2a1beed733f97e2dad8c2ad3c2e17116f6590a3827"}, + {file = 
"zope.interface-6.4.post2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47937cf2e7ed4e0e37f7851c76edeb8543ec9b0eae149b36ecd26176ff1ca874"}, + {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f0a6be264afb094975b5ef55c911379d6989caa87c4e558814ec4f5125cfa2e"}, + {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47654177e675bafdf4e4738ce58cdc5c6d6ee2157ac0a78a3fa460942b9d64a8"}, + {file = "zope.interface-6.4.post2-cp311-cp311-win_amd64.whl", hash = "sha256:e2fb8e8158306567a3a9a41670c1ff99d0567d7fc96fa93b7abf8b519a46b250"}, + {file = "zope.interface-6.4.post2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b912750b13d76af8aac45ddf4679535def304b2a48a07989ec736508d0bbfbde"}, + {file = "zope.interface-6.4.post2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ac46298e0143d91e4644a27a769d1388d5d89e82ee0cf37bf2b0b001b9712a4"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86a94af4a88110ed4bb8961f5ac72edf782958e665d5bfceaab6bf388420a78b"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73f9752cf3596771c7726f7eea5b9e634ad47c6d863043589a1c3bb31325c7eb"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b5c3e9744dcdc9e84c24ed6646d5cf0cf66551347b310b3ffd70f056535854"}, + {file = "zope.interface-6.4.post2-cp312-cp312-win_amd64.whl", hash = "sha256:551db2fe892fcbefb38f6f81ffa62de11090c8119fd4e66a60f3adff70751ec7"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e96ac6b3169940a8cd57b4f2b8edcad8f5213b60efcd197d59fbe52f0accd66e"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cebff2fe5dc82cb22122e4e1225e00a4a506b1a16fafa911142ee124febf2c9e"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ee982237cffaf946db365c3a6ebaa37855d8e3ca5800f6f48890209c1cfefc"}, + {file = "zope.interface-6.4.post2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:fbf649bc77510ef2521cf797700b96167bb77838c40780da7ea3edd8b78044d1"}, + {file = "zope.interface-6.4.post2-cp37-cp37m-win_amd64.whl", hash = "sha256:4c0b208a5d6c81434bdfa0f06d9b667e5de15af84d8cae5723c3a33ba6611b82"}, + {file = "zope.interface-6.4.post2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3fe667935e9562407c2511570dca14604a654988a13d8725667e95161d92e9b"}, + {file = "zope.interface-6.4.post2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a96e6d4074db29b152222c34d7eec2e2db2f92638d2b2b2c704f9e8db3ae0edc"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:866a0f583be79f0def667a5d2c60b7b4cc68f0c0a470f227e1122691b443c934"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe919027f29b12f7a2562ba0daf3e045cb388f844e022552a5674fcdf5d21f1"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e0343a6e06d94f6b6ac52fbc75269b41dd3c57066541a6c76517f69fe67cb43"}, + {file = "zope.interface-6.4.post2-cp38-cp38-win_amd64.whl", hash = "sha256:dabb70a6e3d9c22df50e08dc55b14ca2a99da95a2d941954255ac76fd6982bc5"}, + {file = "zope.interface-6.4.post2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:706efc19f9679a1b425d6fa2b4bc770d976d0984335eaea0869bd32f627591d2"}, + {file = "zope.interface-6.4.post2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d136e5b8821073e1a09dde3eb076ea9988e7010c54ffe4d39701adf0c303438"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1730c93a38b5a18d24549bc81613223962a19d457cfda9bdc66e542f475a36f4"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc2676312cc3468a25aac001ec727168994ea3b69b48914944a44c6a0b251e79"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a62fd6cd518693568e23e02f41816adedfca637f26716837681c90b36af3671"}, + {file = "zope.interface-6.4.post2-cp39-cp39-win_amd64.whl", hash = "sha256:d3f7e001328bd6466b3414215f66dde3c7c13d8025a9c160a75d7b2687090d15"}, + {file = "zope.interface-6.4.post2.tar.gz", hash = "sha256:1c207e6f6dfd5749a26f5a5fd966602d6b824ec00d2df84a7e9a924e8933654e"}, ] [package.dependencies] setuptools = "*" [package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx_rtd_theme"] +docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] @@ -10260,4 +10050,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "33629727ceeb0aa86064658e89349c24fd786bb1bd3833f093651b70b264edb7" +content-hash = "476c95dc8c6adb597a0cd2783eab65c02e0398fc144aa74d56a4cb36032f496f" diff --git a/pyproject.toml b/pyproject.toml index 6a5401bea..8ff91de1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "1.0.0a37" +version = "1.0.0a43" description = "A Python 
package with a built-in web application" authors = ["Langflow "] maintainers = [ @@ -84,6 +84,8 @@ langchain-mistralai = "^0.1.6" couchbase = "^4.2.1" youtube-transcript-api = "^0.6.2" markdown = "^3.6" +langchain-chroma = "^0.1.1" +upstash-vector = "^0.4.0" [tool.poetry.group.dev.dependencies] @@ -112,6 +114,7 @@ pytest-instafail = "^0.5.0" pytest-asyncio = "^0.23.0" pytest-profiling = "^1.7.0" pre-commit = "^3.7.0" +vulture = "^2.11" [tool.poetry.extras] deploy = ["celery", "redis", "flower"] diff --git a/render.yaml b/render.yaml index 583a3c324..9276efee1 100644 --- a/render.yaml +++ b/render.yaml @@ -3,9 +3,9 @@ services: - type: web name: langflow runtime: docker - dockerfilePath: ./Dockerfile + dockerfilePath: ./docker/render.Dockerfile repo: https://github.com/langflow-ai/langflow - branch: main + branch: dev healthCheckPath: /health autoDeploy: false envVars: diff --git a/scripts/factory_restart_space.py b/scripts/factory_restart_space.py new file mode 100644 index 000000000..100bef06c --- /dev/null +++ b/scripts/factory_restart_space.py @@ -0,0 +1,16 @@ +import os + +from huggingface_hub import HfApi, list_models +from rich import print + +# Use root method +models = list_models() + +# Or configure a HfApi client +hf_api = HfApi( + endpoint="https://huggingface.co", # Can be a Private Hub endpoint. 
+ token=os.getenv("HUGGINFACE_API_TOKEN") or "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", +) + +space_runtime = hf_api.restart_space("Langflow/Langflow-Preview", factory_reboot=True) +print(space_runtime) diff --git a/GCP_DEPLOYMENT.md b/scripts/gcp/GCP_DEPLOYMENT.md similarity index 99% rename from GCP_DEPLOYMENT.md rename to scripts/gcp/GCP_DEPLOYMENT.md index 9f17e550b..a848d3d2b 100644 --- a/GCP_DEPLOYMENT.md +++ b/scripts/gcp/GCP_DEPLOYMENT.md @@ -20,8 +20,7 @@ When running as a [spot (preemptible) instance](https://cloud.google.com/compute ## Pricing (approximate) -> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator) ->
+> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator) >
| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes | | ------------------ | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | -------------------------------------------------------------------------- | diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py index ee1eb18f2..343188336 100644 --- a/src/backend/base/langflow/__main__.py +++ b/src/backend/base/langflow/__main__.py @@ -22,7 +22,8 @@ from sqlmodel import select from langflow.main import setup_app from langflow.services.database.models.folder.utils import create_default_folder_if_it_doesnt_exist from langflow.services.database.utils import session_getter -from langflow.services.deps import get_db_service +from langflow.services.deps import get_db_service, get_settings_service, session_scope +from langflow.services.settings.constants import DEFAULT_SUPERUSER from langflow.services.utils import initialize_services from langflow.utils.logger import configure, logger from langflow.utils.util import update_settings @@ -76,14 +77,13 @@ def set_var_for_macos_issue(): def run( host: str = typer.Option("127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"), workers: int = typer.Option(1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"), - timeout: int = typer.Option(300, help="Worker timeout in seconds."), + timeout: int = typer.Option(300, help="Worker timeout in seconds.", envvar="LANGFLOW_WORKER_TIMEOUT"), port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"), components_path: Optional[Path] = typer.Option( Path(__file__).parent / "components", help="Path to the directory containing custom components.", envvar="LANGFLOW_COMPONENTS_PATH", ), - config: str = typer.Option(Path(__file__).parent / "config.yaml", help="Path to the configuration file."), # .env file param 
env_file: Path = typer.Option(None, help="Path to the .env file containing environment variables."), log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), @@ -132,7 +132,6 @@ def run( load_dotenv(env_file, override=True) update_settings( - config, dev=dev, remove_api_keys=remove_api_keys, cache=cache, @@ -146,6 +145,10 @@ def run( if is_port_in_use(port, host): port = get_free_port(port) + settings_service = get_settings_service() + + settings_service.set("worker_timeout", timeout) + options = { "bind": f"{host}:{port}", "workers": get_number_of_workers(workers), @@ -510,6 +513,66 @@ def migration( display_results(results) +@app.command() +def api_key( + log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), +): + """ + Creates an API key for the default superuser if AUTO_LOGIN is enabled. + + Args: + log_level (str, optional): Logging level. Defaults to "error". + + Returns: + None + """ + configure(log_level=log_level) + initialize_services() + settings_service = get_settings_service() + auth_settings = settings_service.auth_settings + if not auth_settings.AUTO_LOGIN: + typer.echo("Auto login is disabled. API keys cannot be created through the CLI.") + return + with session_scope() as session: + from langflow.services.database.models.user.model import User + + superuser = session.exec(select(User).where(User.username == DEFAULT_SUPERUSER)).first() + if not superuser: + typer.echo("Default superuser not found. 
This command requires a superuser and AUTO_LOGIN to be enabled.") + return + from langflow.services.database.models.api_key import ApiKey, ApiKeyCreate + from langflow.services.database.models.api_key.crud import create_api_key, delete_api_key + + api_key = session.exec(select(ApiKey).where(ApiKey.user_id == superuser.id)).first() + if api_key: + delete_api_key(session, api_key.id) + + api_key_create = ApiKeyCreate(name="CLI") + unmasked_api_key = create_api_key(session, api_key_create, user_id=superuser.id) + session.commit() + # Create a banner to display the API key and tell the user it won't be shown again + api_key_banner(unmasked_api_key) + + +def api_key_banner(unmasked_api_key): + is_mac = platform.system() == "Darwin" + import pyperclip # type: ignore + + pyperclip.copy(unmasked_api_key.api_key) + panel = Panel( + f"[bold]API Key Created Successfully:[/bold]\n\n" + f"[bold blue]{unmasked_api_key.api_key}[/bold blue]\n\n" + "This is the only time the API key will be displayed. \n" + "Make sure to store it in a secure location. \n\n" + f"The API key has been copied to your clipboard. [bold]{['Ctrl','Cmd'][is_mac]} + V[/bold] to paste it.", + box=box.ROUNDED, + border_style="blue", + expand=False, + ) + console = Console() + console.print(panel) + + def main(): with warnings.catch_warnings(): warnings.simplefilter("ignore") diff --git a/src/backend/base/langflow/alembic/script.py.mako b/src/backend/base/langflow/alembic/script.py.mako index bc9bca83a..6086a860c 100644 --- a/src/backend/base/langflow/alembic/script.py.mako +++ b/src/backend/base/langflow/alembic/script.py.mako @@ -11,6 +11,7 @@ from alembic import op import sqlalchemy as sa import sqlmodel from sqlalchemy.engine.reflection import Inspector +from langflow.utils import migration ${imports if imports else ""} # revision identifiers, used by Alembic. 
@@ -22,13 +23,9 @@ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} def upgrade() -> None: conn = op.get_bind() - inspector = Inspector.from_engine(conn) # type: ignore - table_names = inspector.get_table_names() ${upgrades if upgrades else "pass"} def downgrade() -> None: conn = op.get_bind() - inspector = Inspector.from_engine(conn) # type: ignore - table_names = inspector.get_table_names() ${downgrades if downgrades else "pass"} diff --git a/src/backend/base/langflow/alembic/versions/012fb73ac359_add_folder_table.py b/src/backend/base/langflow/alembic/versions/012fb73ac359_add_folder_table.py index b9ebc2ca2..a9b9b6c00 100644 --- a/src/backend/base/langflow/alembic/versions/012fb73ac359_add_folder_table.py +++ b/src/backend/base/langflow/alembic/versions/012fb73ac359_add_folder_table.py @@ -48,10 +48,12 @@ def upgrade() -> None: with op.batch_alter_table("folder", schema=None) as batch_op: batch_op.create_index(batch_op.f("ix_folder_name"), ["name"], unique=False) - if "folder_id" not in inspector.get_columns("flow"): - with op.batch_alter_table("flow", schema=None) as batch_op: + column_names = [column["name"] for column in inspector.get_columns("flow")] + with op.batch_alter_table("flow", schema=None) as batch_op: + if "folder_id" not in column_names: batch_op.add_column(sa.Column("folder_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)) batch_op.create_foreign_key("flow_folder_id_fkey", "folder", ["folder_id"], ["id"]) + if "folder" in column_names: batch_op.drop_column("folder") # ### end Alembic commands ### @@ -62,11 +64,13 @@ def downgrade() -> None: inspector = Inspector.from_engine(conn) # type: ignore table_names = inspector.get_table_names() # ### commands auto generated by Alembic - please adjust! 
### - if "folder_id" in inspector.get_columns("flow"): - with op.batch_alter_table("flow", schema=None) as batch_op: + column_names = [column["name"] for column in inspector.get_columns("flow")] + with op.batch_alter_table("flow", schema=None) as batch_op: + if "folder" not in column_names: batch_op.add_column(sa.Column("folder", sa.VARCHAR(), nullable=True)) - batch_op.drop_constraint("flow_folder_id_fkey", type_="foreignkey") + if "folder_id" in column_names: batch_op.drop_column("folder_id") + batch_op.drop_constraint("flow_folder_id_fkey", type_="foreignkey") indexes = inspector.get_indexes("folder") if "ix_folder_name" in [index["name"] for index in indexes]: diff --git a/src/backend/base/langflow/alembic/versions/1c79524817ed_add_unique_constraints_per_user_in_.py b/src/backend/base/langflow/alembic/versions/1c79524817ed_add_unique_constraints_per_user_in_.py new file mode 100644 index 000000000..0feec1b8b --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/1c79524817ed_add_unique_constraints_per_user_in_.py @@ -0,0 +1,42 @@ +"""Add unique constraints per user in folder table + +Revision ID: 1c79524817ed +Revises: 3bb0ddf32dfb +Create Date: 2024-05-29 23:12:09.146880 + +""" + +from typing import Sequence, Union + +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "1c79524817ed" +down_revision: Union[str, None] = "3bb0ddf32dfb" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + constraints_names = [constraint["name"] for constraint in inspector.get_unique_constraints("folder")] + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("folder", schema=None) as batch_op: + if "unique_folder_name" not in constraints_names: + batch_op.create_unique_constraint("unique_folder_name", ["user_id", "name"]) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + constraints_names = [constraint["name"] for constraint in inspector.get_unique_constraints("folder")] + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("folder", schema=None) as batch_op: + if "unique_folder_name" in constraints_names: + batch_op.drop_constraint("unique_folder_name", type_="unique") + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/3bb0ddf32dfb_add_unique_constraints_per_user_in_flow_.py b/src/backend/base/langflow/alembic/versions/3bb0ddf32dfb_add_unique_constraints_per_user_in_flow_.py new file mode 100644 index 000000000..699df1437 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/3bb0ddf32dfb_add_unique_constraints_per_user_in_flow_.py @@ -0,0 +1,54 @@ +"""Add unique constraints per user in flow table + +Revision ID: 3bb0ddf32dfb +Revises: a72f5cf9c2f9 +Create Date: 2024-05-29 23:08:43.935040 + +""" + +from typing import Sequence, Union + +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "3bb0ddf32dfb" +down_revision: Union[str, None] = "a72f5cf9c2f9" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + # ### commands auto generated by Alembic - please adjust! 
### + indexes_names = [index["name"] for index in inspector.get_indexes("flow")] + constraints_names = [constraint["name"] for constraint in inspector.get_unique_constraints("flow")] + with op.batch_alter_table("flow", schema=None) as batch_op: + if "ix_flow_endpoint_name" in indexes_names: + batch_op.drop_index("ix_flow_endpoint_name") + batch_op.create_index(batch_op.f("ix_flow_endpoint_name"), ["endpoint_name"], unique=False) + if "unique_flow_endpoint_name" not in constraints_names: + batch_op.create_unique_constraint("unique_flow_endpoint_name", ["user_id", "endpoint_name"]) + if "unique_flow_name" not in constraints_names: + batch_op.create_unique_constraint("unique_flow_name", ["user_id", "name"]) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + # ### commands auto generated by Alembic - please adjust! ### + indexes_names = [index["name"] for index in inspector.get_indexes("flow")] + constraints_names = [constraint["name"] for constraint in inspector.get_unique_constraints("flow")] + with op.batch_alter_table("flow", schema=None) as batch_op: + if "unique_flow_name" in constraints_names: + batch_op.drop_constraint("unique_flow_name", type_="unique") + if "unique_flow_endpoint_name" in constraints_names: + batch_op.drop_constraint("unique_flow_endpoint_name", type_="unique") + if "ix_flow_endpoint_name" in indexes_names: + batch_op.drop_index(batch_op.f("ix_flow_endpoint_name")) + batch_op.create_index("ix_flow_endpoint_name", ["endpoint_name"], unique=1) + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/631faacf5da2_add_webhook_columns.py b/src/backend/base/langflow/alembic/versions/631faacf5da2_add_webhook_columns.py new file mode 100644 index 000000000..379fba17c --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/631faacf5da2_add_webhook_columns.py @@ -0,0 +1,45 @@ +"""Add webhook columns + +Revision ID: 
631faacf5da2 +Revises: 1c79524817ed +Create Date: 2024-04-22 15:14:43.454784 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "631faacf5da2" +down_revision: Union[str, None] = "1c79524817ed" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() + # ### commands auto generated by Alembic - please adjust! ### + column_names = [column["name"] for column in inspector.get_columns("flow")] + with op.batch_alter_table("flow", schema=None) as batch_op: + if "flow" in table_names and "webhook" not in column_names: + batch_op.add_column(sa.Column("webhook", sa.Boolean(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() + # ### commands auto generated by Alembic - please adjust! 
### + column_names = [column["name"] for column in inspector.get_columns("flow")] + with op.batch_alter_table("flow", schema=None) as batch_op: + if "flow" in table_names and "webhook" in column_names: + batch_op.drop_column("webhook") + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py b/src/backend/base/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py index b46400899..7499b32ae 100644 --- a/src/backend/base/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py +++ b/src/backend/base/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py @@ -52,9 +52,14 @@ def upgrade() -> None: def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### try: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + column_names = [column["name"] for column in inspector.get_columns("flow")] with op.batch_alter_table("flow", schema=None) as batch_op: - batch_op.drop_column("folder") - batch_op.drop_column("updated_at") + if "folder" in column_names: + batch_op.drop_column("folder") + if "updated_at" in column_names: + batch_op.drop_column("updated_at") except Exception as e: print(e) pass diff --git a/src/backend/base/langflow/alembic/versions/a72f5cf9c2f9_add_endpoint_name_col.py b/src/backend/base/langflow/alembic/versions/a72f5cf9c2f9_add_endpoint_name_col.py new file mode 100644 index 000000000..3d6dd604c --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/a72f5cf9c2f9_add_endpoint_name_col.py @@ -0,0 +1,52 @@ +"""Add endpoint name col + +Revision ID: a72f5cf9c2f9 +Revises: 29fe8f1f806b +Create Date: 2024-05-29 21:44:04.240816 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +import sqlmodel +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. 
+revision: str = "a72f5cf9c2f9" +down_revision: Union[str, None] = "29fe8f1f806b" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + # ### commands auto generated by Alembic - please adjust! ### + column_names = [column["name"] for column in inspector.get_columns("flow")] + indexes = inspector.get_indexes("flow") + index_names = [index["name"] for index in indexes] + with op.batch_alter_table("flow", schema=None) as batch_op: + if "endpoint_name" not in column_names: + batch_op.add_column(sa.Column("endpoint_name", sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + if "ix_flow_endpoint_name" not in index_names: + batch_op.create_index(batch_op.f("ix_flow_endpoint_name"), ["endpoint_name"], unique=True) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + # ### commands auto generated by Alembic - please adjust! 
### + column_names = [column["name"] for column in inspector.get_columns("flow")] + indexes = inspector.get_indexes("flow") + index_names = [index["name"] for index in indexes] + with op.batch_alter_table("flow", schema=None) as batch_op: + if "ix_flow_endpoint_name" in index_names: + batch_op.drop_index(batch_op.f("ix_flow_endpoint_name")) + if "endpoint_name" in column_names: + batch_op.drop_column("endpoint_name") + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/api/utils.py b/src/backend/base/langflow/api/utils.py index ffb5f22c9..cc38b474a 100644 --- a/src/backend/base/langflow/api/utils.py +++ b/src/backend/base/langflow/api/utils.py @@ -286,7 +286,7 @@ async def get_next_runnable_vertices( for v_id in set(next_runnable_vertices): # Use set to avoid duplicates graph.vertices_to_run.remove(v_id) graph.remove_from_predecessors(v_id) - await chat_service.set_cache(flow_id=flow_id, data=graph, lock=lock) + await chat_service.set_cache(key=flow_id, data=graph, lock=lock) return next_runnable_vertices diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py index 23dea900b..fbb763e8d 100644 --- a/src/backend/base/langflow/api/v1/chat.py +++ b/src/backend/base/langflow/api/v1/chat.py @@ -1,6 +1,5 @@ import time import uuid -from functools import partial from typing import TYPE_CHECKING, Annotated, Optional from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException @@ -162,7 +161,6 @@ async def build_vertex( vertex = graph.get_vertex(vertex_id) try: lock = chat_service._cache_locks[flow_id_str] - set_cache_coro = partial(chat_service.set_cache, flow_id=flow_id_str) ( next_runnable_vertices, top_level_vertices, @@ -173,7 +171,7 @@ async def build_vertex( vertex, ) = await graph.build_vertex( lock=lock, - set_cache_coro=set_cache_coro, + chat_service=chat_service, vertex_id=vertex_id, user_id=current_user.id, inputs_dict=inputs.model_dump() if inputs else {}, diff --git 
a/src/backend/base/langflow/api/v1/endpoints.py b/src/backend/base/langflow/api/v1/endpoints.py index e7bb761ba..df55e21d4 100644 --- a/src/backend/base/langflow/api/v1/endpoints.py +++ b/src/backend/base/langflow/api/v1/endpoints.py @@ -1,14 +1,15 @@ from http import HTTPStatus -from typing import Annotated, List, Optional, Union +from typing import TYPE_CHECKING, Annotated, List, Optional, Union from uuid import UUID import sqlalchemy as sa -from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile, status +from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException, Request, UploadFile, status from loguru import logger from sqlmodel import Session, select from langflow.api.utils import update_frontend_node_with_template_values from langflow.api.v1.schemas import ( + ConfigResponse, CustomComponentRequest, InputValueRequest, ProcessResponse, @@ -18,20 +19,25 @@ from langflow.api.v1.schemas import ( UpdateCustomComponentRequest, UploadFileResponse, ) +from langflow.custom import CustomComponent +from langflow.custom.utils import build_custom_component_template from langflow.graph.graph.base import Graph -from langflow.interface.custom.custom_component import CustomComponent -from langflow.interface.custom.utils import build_custom_component_template +from langflow.graph.schema import RunOutputs +from langflow.helpers.flow import get_flow_by_id_or_endpoint_name from langflow.processing.process import process_tweaks, run_graph_internal from langflow.schema.graph import Tweaks from langflow.services.auth.utils import api_key_security, get_current_active_user from langflow.services.cache.utils import save_uploaded_file from langflow.services.database.models.flow import Flow +from langflow.services.database.models.flow.utils import get_all_webhook_components_in_flow, get_flow_by_id from langflow.services.database.models.user.model import User from langflow.services.deps import get_session, get_session_service, get_settings_service, 
get_task_service from langflow.services.session.service import SessionService from langflow.services.task.service import TaskService -# build router +if TYPE_CHECKING: + from langflow.services.settings.manager import SettingsService + router = APIRouter(tags=["Base"]) @@ -43,17 +49,82 @@ def get_all( logger.debug("Building langchain types dict") try: - all_types_dict = get_all_types_dict(settings_service.settings.COMPONENTS_PATH) + all_types_dict = get_all_types_dict(settings_service.settings.components_path) return all_types_dict except Exception as exc: logger.exception(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc -@router.post("/run/{flow_id}", response_model=RunResponse, response_model_exclude_none=True) +async def simple_run_flow( + db: Session, + flow: Flow, + input_request: SimplifiedAPIRequest, + session_service: SessionService, + stream: bool = False, + api_key_user: Optional[User] = None, +): + try: + task_result: List[RunOutputs] = [] + artifacts = {} + user_id = api_key_user.id if api_key_user else None + flow_id_str = str(flow.id) + if input_request.session_id: + session_data = await session_service.load_session(input_request.session_id, flow_id=flow_id_str) + graph, artifacts = session_data if session_data else (None, None) + if graph is None: + raise ValueError(f"Session {input_request.session_id} not found") + else: + if flow.data is None: + raise ValueError(f"Flow {flow_id_str} has no data") + graph_data = flow.data + graph_data = process_tweaks(graph_data, input_request.tweaks or {}) + graph = Graph.from_payload(graph_data, flow_id=flow_id_str, user_id=str(user_id)) + inputs = [ + InputValueRequest(components=[], input_value=input_request.input_value, type=input_request.input_type) + ] + # outputs is a list of all components that should return output + # we need to get them by checking their type + # if the output type is debug, we return all outputs + # if the output type is any, we return all outputs that are either chat 
or text + # if the output type is chat or text, we return only the outputs that match the type + if input_request.output_component: + outputs = [input_request.output_component] + else: + outputs = [ + vertex.id + for vertex in graph.vertices + if input_request.output_type == "debug" + or ( + vertex.is_output + and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower()) + ) + ] + task_result, session_id = await run_graph_internal( + graph=graph, + flow_id=flow_id_str, + session_id=input_request.session_id, + inputs=inputs, + outputs=outputs, + artifacts=artifacts, + session_service=session_service, + stream=stream, + ) + + return RunResponse(outputs=task_result, session_id=session_id) + + except sa.exc.StatementError as exc: + # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') + if "badly formed hexadecimal UUID string" in str(exc): + logger.error(f"Flow ID {flow_id_str} is not a valid UUID") + # This means the Flow ID is not a valid UUID which means it can't find the flow + raise ValueError(str(exc)) from exc + + +@router.post("/run/{flow_id_or_name}", response_model=RunResponse, response_model_exclude_none=True) async def simplified_run_flow( db: Annotated[Session, Depends(get_session)], - flow_id: UUID, + flow: Annotated[Flow, Depends(get_flow_by_id_or_endpoint_name)], input_request: SimplifiedAPIRequest = SimplifiedAPIRequest(), stream: bool = False, api_key_user: User = Depends(api_key_security), @@ -64,7 +135,7 @@ async def simplified_run_flow( ### Parameters: - `db` (Session): Database session for executing queries. - - `flow_id` (str): Unique identifier of the flow to be executed. + - `flow_id_or_name` (str): ID or endpoint name of the flow to run. - `input_request` (SimplifiedAPIRequest): Request object containing input values, types, output selection, tweaks, and session ID. - `api_key_user` (User): User object derived from the provided API key, used for authentication. 
- `session_service` (SessionService): Service for managing flow sessions, essential for session reuse and caching. @@ -107,73 +178,21 @@ async def simplified_run_flow( This endpoint provides a powerful interface for executing flows with enhanced flexibility and efficiency, supporting a wide range of applications by allowing for dynamic input and output configuration along with performance optimizations through session management and caching. """ - session_id = input_request.session_id - try: - flow_id_str = str(flow_id) - artifacts = {} - if input_request.session_id: - session_data = await session_service.load_session(input_request.session_id, flow_id=flow_id_str) - graph, artifacts = session_data if session_data else (None, None) - if graph is None: - raise ValueError(f"Session {input_request.session_id} not found") - else: - # Get the flow that matches the flow_id and belongs to the user - # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() - flow = db.exec(select(Flow).where(Flow.id == flow_id_str).where(Flow.user_id == api_key_user.id)).first() - if flow is None: - raise ValueError(f"Flow {flow_id_str} not found") - - if flow.data is None: - raise ValueError(f"Flow {flow_id_str} has no data") - graph_data = flow.data - - graph_data = process_tweaks(graph_data, input_request.tweaks or {}, stream=stream) - graph = Graph.from_payload(graph_data, flow_id=flow_id_str, user_id=str(api_key_user.id)) - inputs = [ - InputValueRequest(components=[], input_value=input_request.input_value, type=input_request.input_type) - ] - # outputs is a list of all components that should return output - # we need to get them by checking their type - # if the output type is debug, we return all outputs - # if the output type is any, we return all outputs that are either chat or text - # if the output type is chat or text, we return only the outputs that match the type - if input_request.output_component: - outputs = 
[input_request.output_component] - else: - outputs = [ - vertex.id - for vertex in graph.vertices - if input_request.output_type == "debug" - or ( - vertex.is_output - and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower()) - ) - ] - task_result, session_id = await run_graph_internal( - graph=graph, - flow_id=flow_id_str, - session_id=input_request.session_id, - inputs=inputs, - outputs=outputs, - artifacts=artifacts, + return await simple_run_flow( + db=db, + flow=flow, + input_request=input_request, session_service=session_service, stream=stream, + api_key_user=api_key_user, ) - return RunResponse(outputs=task_result, session_id=session_id) - except sa.exc.StatementError as exc: - # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') - if "badly formed hexadecimal UUID string" in str(exc): - logger.error(f"Flow ID {flow_id_str} is not a valid UUID") - # This means the Flow ID is not a valid UUID which means it can't find the flow - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc except ValueError as exc: - if f"Flow {flow_id_str} not found" in str(exc): - logger.error(f"Flow {flow_id_str} not found") - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc - elif f"Session {session_id} not found" in str(exc): - logger.error(f"Session {session_id} not found") + if "badly formed hexadecimal UUID string" in str(exc): + # This means the Flow ID is not a valid UUID which means it can't find the flow + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc + if "not found" in str(exc): raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc else: logger.exception(exc) @@ -183,6 +202,68 @@ async def simplified_run_flow( raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc +@router.post("/webhook/{flow_id}", response_model=dict, 
status_code=HTTPStatus.ACCEPTED) +async def webhook_run_flow( + db: Annotated[Session, Depends(get_session)], + flow: Annotated[Flow, Depends(get_flow_by_id)], + request: Request, + background_tasks: BackgroundTasks, + session_service: SessionService = Depends(get_session_service), +): + """ + Run a flow using a webhook request. + + Args: + db (Session): The database session. + request (Request): The incoming HTTP request. + background_tasks (BackgroundTasks): The background tasks manager. + session_service (SessionService, optional): The session service. Defaults to Depends(get_session_service). + flow (Flow, optional): The flow to be executed. Defaults to Depends(get_flow_by_id). + + Returns: + dict: A dictionary containing the status of the task. + + Raises: + HTTPException: If the flow is not found or if there is an error processing the request. + """ + try: + logger.debug("Received webhook request") + data = await request.body() + if not data: + logger.error("Request body is empty") + raise ValueError( + "Request body is empty. 
You should provide a JSON payload containing the flow ID.", + ) + + # get all webhook components in the flow + webhook_components = get_all_webhook_components_in_flow(flow.data) + tweaks = {} + data_dict = await request.json() + for component in webhook_components: + tweaks[component["id"]] = {"data": data.decode() if isinstance(data, bytes) else data} + input_request = SimplifiedAPIRequest( + input_value=data_dict.get("input_value", ""), + input_type=data_dict.get("input_type", "chat"), + output_type=data_dict.get("output_type", "chat"), + tweaks=tweaks, + session_id=data_dict.get("session_id"), + ) + logger.debug("Starting background task") + background_tasks.add_task( + simple_run_flow, + db=db, + flow=flow, + input_request=input_request, + session_service=session_service, + ) + return {"message": "Task started in the background", "status": "in progress"} + except Exception as exc: + if "Flow ID is required" in str(exc) or "Request body is empty" in str(exc): + raise HTTPException(status_code=400, detail=str(exc)) from exc + logger.exception(exc) + raise HTTPException(status_code=500, detail=str(exc)) from exc + + @router.post("/run/advanced/{flow_id}", response_model=RunResponse, response_model_exclude_none=True) async def experimental_run_flow( session: Annotated[Session, Depends(get_session)], @@ -440,3 +521,15 @@ async def custom_component_update( except Exception as exc: logger.exception(exc) raise HTTPException(status_code=400, detail=str(exc)) from exc + + +@router.get("/config", response_model=ConfigResponse) +def get_config(): + try: + from langflow.services.deps import get_settings_service + + settings_service: "SettingsService" = get_settings_service() + return settings_service.settings.model_dump() + except Exception as exc: + logger.exception(exc) + raise HTTPException(status_code=500, detail=str(exc)) from exc diff --git a/src/backend/base/langflow/api/v1/flows.py b/src/backend/base/langflow/api/v1/flows.py index 7fe7c516b..36030a12d 100644 --- 
a/src/backend/base/langflow/api/v1/flows.py +++ b/src/backend/base/langflow/api/v1/flows.py @@ -13,6 +13,7 @@ from langflow.api.v1.schemas import FlowListCreate, FlowListIds, FlowListRead from langflow.initial_setup.setup import STARTER_FOLDER_NAME from langflow.services.auth.utils import get_current_active_user from langflow.services.database.models.flow import Flow, FlowCreate, FlowRead, FlowUpdate +from langflow.services.database.models.flow.utils import get_webhook_component_in_flow from langflow.services.database.models.folder.constants import DEFAULT_FOLDER_NAME from langflow.services.database.models.folder.model import Folder from langflow.services.database.models.user.model import User @@ -38,7 +39,10 @@ def create_flow( db_flow.updated_at = datetime.now(timezone.utc) if db_flow.folder_id is None: - default_folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME)).first() + # Make sure flows always have a folder + default_folder = session.exec( + select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME, Folder.user_id == current_user.id) + ).first() if default_folder: db_flow.folder_id = default_folder.id @@ -54,8 +58,22 @@ def read_flows( current_user: User = Depends(get_current_active_user), session: Session = Depends(get_session), settings_service: "SettingsService" = Depends(get_settings_service), + remove_example_flows: bool = False, ): - """Read all flows.""" + """ + Retrieve a list of flows. + + Args: + current_user (User): The current authenticated user. + session (Session): The database session. + settings_service (SettingsService): The settings service. + remove_example_flows (bool, optional): Whether to remove example flows. Defaults to False. + + + Returns: + List[Dict]: A list of flows in JSON format. 
+ """ + try: auth_settings = settings_service.auth_settings if auth_settings.AUTO_LOGIN: @@ -70,15 +88,16 @@ def read_flows( flows = validate_is_component(flows) # type: ignore flow_ids = [flow.id for flow in flows] # with the session get the flows that DO NOT have a user_id - try: - folder = session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first() + if not remove_example_flows: + try: + folder = session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first() - example_flows = folder.flows if folder else [] - for example_flow in example_flows: - if example_flow.id not in flow_ids: - flows.append(example_flow) # type: ignore - except Exception as e: - logger.error(e) + example_flows = folder.flows if folder else [] + for example_flow in example_flows: + if example_flow.id not in flow_ids: + flows.append(example_flow) # type: ignore + except Exception as e: + logger.error(e) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) from e return [jsonable_encoder(flow) for flow in flows] @@ -117,30 +136,51 @@ def update_flow( settings_service=Depends(get_settings_service), ): """Update a flow.""" + try: + db_flow = read_flow( + session=session, + flow_id=flow_id, + current_user=current_user, + settings_service=settings_service, + ) + if not db_flow: + raise HTTPException(status_code=404, detail="Flow not found") + flow_data = flow.model_dump(exclude_unset=True) + if settings_service.settings.remove_api_keys: + flow_data = remove_api_keys(flow_data) + for key, value in flow_data.items(): + if value is not None: + setattr(db_flow, key, value) + webhook_component = get_webhook_component_in_flow(db_flow.data) + db_flow.webhook = webhook_component is not None + db_flow.updated_at = datetime.now(timezone.utc) + if db_flow.folder_id is None: + default_folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME)).first() + if default_folder: + db_flow.folder_id = default_folder.id + session.add(db_flow) 
+ session.commit() + session.refresh(db_flow) + return db_flow + except Exception as e: + # If it is a validation error, return the error message + if hasattr(e, "errors"): + raise HTTPException(status_code=400, detail=str(e)) from e + elif "UNIQUE constraint failed" in str(e): + # Get the name of the column that failed + columns = str(e).split("UNIQUE constraint failed: ")[1].split(".")[1].split("\n")[0] + # UNIQUE constraint failed: flow.user_id, flow.name + # or UNIQUE constraint failed: flow.name + # if the column has id in it, we want the other column + column = columns.split(",")[1] if "id" in columns.split(",")[0] else columns.split(",")[0] - db_flow = read_flow( - session=session, - flow_id=flow_id, - current_user=current_user, - settings_service=settings_service, - ) - if not db_flow: - raise HTTPException(status_code=404, detail="Flow not found") - flow_data = flow.model_dump(exclude_unset=True) - if settings_service.settings.REMOVE_API_KEYS: - flow_data = remove_api_keys(flow_data) - for key, value in flow_data.items(): - if value is not None: - setattr(db_flow, key, value) - db_flow.updated_at = datetime.now(timezone.utc) - if db_flow.folder_id is None: - default_folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME)).first() - if default_folder: - db_flow.folder_id = default_folder.id - session.add(db_flow) - session.commit() - session.refresh(db_flow) - return db_flow + raise HTTPException( + status_code=400, detail=f"{column.capitalize().replace('_', ' ')} must be unique" + ) from e + elif isinstance(e, HTTPException): + raise e + else: + raise HTTPException(status_code=500, detail=str(e)) from e @router.delete("/{flow_id}", status_code=200) diff --git a/src/backend/base/langflow/api/v1/folders.py b/src/backend/base/langflow/api/v1/folders.py index 3aa57842c..7402881c7 100644 --- a/src/backend/base/langflow/api/v1/folders.py +++ b/src/backend/base/langflow/api/v1/folders.py @@ -1,5 +1,4 @@ from typing import List -from uuid 
import UUID import orjson from fastapi import APIRouter, Depends, File, HTTPException, Response, UploadFile, status @@ -88,7 +87,7 @@ def read_folders( def read_folder( *, session: Session = Depends(get_session), - folder_id: UUID, + folder_id: str, current_user: User = Depends(get_current_active_user), ): try: @@ -106,7 +105,7 @@ def read_folder( def update_folder( *, session: Session = Depends(get_session), - folder_id: UUID, + folder_id: str, folder: FolderUpdate, # Assuming FolderUpdate is a Pydantic model defining updatable fields current_user: User = Depends(get_current_active_user), ): @@ -155,7 +154,7 @@ def update_folder( def delete_folder( *, session: Session = Depends(get_session), - folder_id: UUID, + folder_id: str, current_user: User = Depends(get_current_active_user), ): try: @@ -177,7 +176,7 @@ def delete_folder( async def download_file( *, session: Session = Depends(get_session), - folder_id: UUID, + folder_id: str, current_user: User = Depends(get_current_active_user), ): """Download all flows from folder.""" diff --git a/src/backend/base/langflow/api/v1/login.py b/src/backend/base/langflow/api/v1/login.py index 69a1d5876..cde6bd28b 100644 --- a/src/backend/base/langflow/api/v1/login.py +++ b/src/backend/base/langflow/api/v1/login.py @@ -46,6 +46,7 @@ async def login_to_get_access_token( samesite=auth_settings.REFRESH_SAME_SITE, secure=auth_settings.REFRESH_SECURE, expires=auth_settings.REFRESH_TOKEN_EXPIRE_SECONDS, + domain=auth_settings.COOKIE_DOMAIN, ) response.set_cookie( "access_token_lf", @@ -54,6 +55,7 @@ async def login_to_get_access_token( samesite=auth_settings.ACCESS_SAME_SITE, secure=auth_settings.ACCESS_SECURE, expires=auth_settings.ACCESS_TOKEN_EXPIRE_SECONDS, + domain=auth_settings.COOKIE_DOMAIN, ) variable_service.initialize_user_variables(user.id, db) # Create default folder for user if it doesn't exist @@ -71,8 +73,7 @@ async def login_to_get_access_token( async def auto_login( response: Response, db: Session = 
Depends(get_session), - settings_service=Depends(get_settings_service), - variable_service: VariableService = Depends(get_variable_service), + settings_service=Depends(get_settings_service) ): auth_settings = settings_service.auth_settings if settings_service.auth_settings.AUTO_LOGIN: @@ -84,9 +85,9 @@ async def auto_login( samesite=auth_settings.ACCESS_SAME_SITE, secure=auth_settings.ACCESS_SECURE, expires=None, # Set to None to make it a session cookie + domain=auth_settings.COOKIE_DOMAIN, ) - variable_service.initialize_user_variables(user_id, db) - create_default_folder_if_it_doesnt_exist(db, user_id) + return tokens raise HTTPException( @@ -117,6 +118,7 @@ async def refresh_token( samesite=auth_settings.REFRESH_SAME_SITE, secure=auth_settings.REFRESH_SECURE, expires=auth_settings.REFRESH_TOKEN_EXPIRE_SECONDS, + domain=auth_settings.COOKIE_DOMAIN, ) response.set_cookie( "access_token_lf", @@ -125,6 +127,7 @@ async def refresh_token( samesite=auth_settings.ACCESS_SAME_SITE, secure=auth_settings.ACCESS_SECURE, expires=auth_settings.ACCESS_TOKEN_EXPIRE_SECONDS, + domain=auth_settings.COOKIE_DOMAIN, ) return tokens else: diff --git a/src/backend/base/langflow/api/v1/schemas.py b/src/backend/base/langflow/api/v1/schemas.py index 2b9eff312..9ccdb0085 100644 --- a/src/backend/base/langflow/api/v1/schemas.py +++ b/src/backend/base/langflow/api/v1/schemas.py @@ -248,6 +248,7 @@ class ResultDataResponse(BaseModel): artifacts: Optional[Any] = Field(default_factory=dict) timedelta: Optional[float] = None duration: Optional[str] = None + used_frozen_result: Optional[bool] = False class VertexBuildResponse(BaseModel): @@ -316,3 +317,7 @@ class FlowDataRequest(BaseModel): nodes: List[dict] edges: List[dict] viewport: Optional[dict] = None + + +class ConfigResponse(BaseModel): + frontend_timeout: int diff --git a/src/backend/base/langflow/api/v1/store.py b/src/backend/base/langflow/api/v1/store.py index 7d0e9bff9..d645480d0 100644 --- 
a/src/backend/base/langflow/api/v1/store.py +++ b/src/backend/base/langflow/api/v1/store.py @@ -54,7 +54,7 @@ def check_if_store_is_enabled( settings_service=Depends(get_settings_service), ): return { - "enabled": settings_service.settings.STORE, + "enabled": settings_service.settings.store, } diff --git a/src/backend/base/langflow/interface/document_loaders/__init__.py b/src/backend/base/langflow/base/curl/__init__.py similarity index 100% rename from src/backend/base/langflow/interface/document_loaders/__init__.py rename to src/backend/base/langflow/base/curl/__init__.py diff --git a/src/backend/base/langflow/base/curl/parse.py b/src/backend/base/langflow/base/curl/parse.py new file mode 100644 index 000000000..c86638306 --- /dev/null +++ b/src/backend/base/langflow/base/curl/parse.py @@ -0,0 +1,89 @@ +""" +This file contains a fix for the implementation of the `uncurl` library, which is available at https://github.com/spulec/uncurl.git. + +The `uncurl` library provides a way to parse and convert cURL commands into Python requests. However, there are some issues with the original implementation that this file aims to fix. + +The `parse_context` function in this file takes a cURL command as input and returns a `ParsedContext` object, which contains the parsed information from the cURL command, such as the HTTP method, URL, headers, cookies, etc. + +The `normalize_newlines` function is a helper function that replaces the line continuation character ("\") followed by a newline with a space. 
+ + +""" + +import re +import shlex +from collections import OrderedDict, namedtuple +from http.cookies import SimpleCookie + +from uncurl.api import parser # type: ignore + +parser.add_argument("-x", "--proxy", default={}) +parser.add_argument("-U", "--proxy-user", default="") + +ParsedContext = namedtuple("ParsedContext", ["method", "url", "data", "headers", "cookies", "verify", "auth", "proxy"]) + + +def normalize_newlines(multiline_text): + return multiline_text.replace(" \\\n", " ") + + +def parse_context(curl_command): + method = "get" + + tokens = shlex.split(normalize_newlines(curl_command)) + tokens = [token for token in tokens if token and token != " "] + parsed_args = parser.parse_args(tokens) + + post_data = parsed_args.data or parsed_args.data_binary + if post_data: + method = "post" + + if parsed_args.X: + method = parsed_args.X.lower() + + cookie_dict = OrderedDict() + quoted_headers = OrderedDict() + + for curl_header in parsed_args.header: + if curl_header.startswith(":"): + occurrence = [m.start() for m in re.finditer(":", curl_header)] + header_key, header_value = curl_header[: occurrence[1]], curl_header[occurrence[1] + 1 :] + else: + header_key, header_value = curl_header.split(":", 1) + + if header_key.lower().strip("$") == "cookie": + cookie = SimpleCookie(bytes(header_value, "ascii").decode("unicode-escape")) + for key in cookie: + cookie_dict[key] = cookie[key].value + else: + quoted_headers[header_key] = header_value.strip() + + # add auth + user = parsed_args.user + if parsed_args.user: + user = tuple(user.split(":")) + + # add proxy and its authentication if it's available. 
+ proxies = parsed_args.proxy + # proxy_auth = parsed_args.proxy_user + if parsed_args.proxy and parsed_args.proxy_user: + proxies = { + "http": "http://{}@{}/".format(parsed_args.proxy_user, parsed_args.proxy), + "https": "http://{}@{}/".format(parsed_args.proxy_user, parsed_args.proxy), + } + elif parsed_args.proxy: + proxies = { + "http": "http://{}/".format(parsed_args.proxy), + "https": "http://{}/".format(parsed_args.proxy), + } + + return ParsedContext( + method=method, + url=parsed_args.url, + data=post_data, + headers=quoted_headers, + cookies=cookie_dict, + verify=parsed_args.insecure, + auth=user, + proxy=proxies, + ) diff --git a/src/backend/base/langflow/interface/embeddings/__init__.py b/src/backend/base/langflow/base/flow_processing/__init__.py similarity index 100% rename from src/backend/base/langflow/interface/embeddings/__init__.py rename to src/backend/base/langflow/base/flow_processing/__init__.py diff --git a/src/backend/base/langflow/base/flow_processing/utils.py b/src/backend/base/langflow/base/flow_processing/utils.py new file mode 100644 index 000000000..4e121f128 --- /dev/null +++ b/src/backend/base/langflow/base/flow_processing/utils.py @@ -0,0 +1,67 @@ +from typing import List + +from langflow.graph.schema import ResultData, RunOutputs +from langflow.schema.schema import Record + + +def build_records_from_run_outputs(run_outputs: RunOutputs) -> List[Record]: + """ + Build a list of records from the given RunOutputs. + + Args: + run_outputs (RunOutputs): The RunOutputs object containing the output data. + + Returns: + List[Record]: A list of records built from the RunOutputs. 
+ + """ + if not run_outputs: + return [] + records = [] + for result_data in run_outputs.outputs: + if result_data: + records.extend(build_records_from_result_data(result_data)) + return records + + +def build_records_from_result_data(result_data: ResultData, get_final_results_only: bool = True) -> List[Record]: + """ + Build a list of records from the given ResultData. + + Args: + result_data (ResultData): The ResultData object containing the result data. + get_final_results_only (bool, optional): Whether to include only final results. Defaults to True. + + Returns: + List[Record]: A list of records built from the ResultData. + + """ + messages = result_data.messages + if not messages: + return [] + records = [] + for message in messages: + message_dict = message if isinstance(message, dict) else message.model_dump() + if get_final_results_only: + result_data_dict = result_data.model_dump() + results = result_data_dict.get("results", {}) + inner_result = results.get("result", {}) + record = Record(data={"result": inner_result, "message": message_dict}, text_key="result") + records.append(record) + return records + + +def format_flow_output_records(records: List[Record]) -> str: + """ + Format the flow output records into a string. + + Args: + records (List[Record]): The list of records to format. + + Returns: + str: The formatted flow output records. 
+ + """ + result = "Flow run output:\n" + results = "\n".join([record.result for record in records if record.data["message"]]) + return result + results diff --git a/src/backend/base/langflow/base/io/chat.py b/src/backend/base/langflow/base/io/chat.py index 14a09b3f5..6089f19ea 100644 --- a/src/backend/base/langflow/base/io/chat.py +++ b/src/backend/base/langflow/base/io/chat.py @@ -1,8 +1,8 @@ from typing import Optional, Union +from langflow.custom import CustomComponent from langflow.field_typing import Text from langflow.helpers.record import records_to_text -from langflow.interface.custom.custom_component import CustomComponent from langflow.memory import store_message from langflow.schema import Record diff --git a/src/backend/base/langflow/base/io/text.py b/src/backend/base/langflow/base/io/text.py index 5974e7d13..5ecfea11a 100644 --- a/src/backend/base/langflow/base/io/text.py +++ b/src/backend/base/langflow/base/io/text.py @@ -1,8 +1,8 @@ from typing import Optional +from langflow.custom import CustomComponent from langflow.field_typing import Text from langflow.helpers.record import records_to_text -from langflow.interface.custom.custom_component import CustomComponent from langflow.schema.schema import Record diff --git a/src/backend/base/langflow/base/memory/memory.py b/src/backend/base/langflow/base/memory/memory.py index 1bb2e22ff..0fb8cf209 100644 --- a/src/backend/base/langflow/base/memory/memory.py +++ b/src/backend/base/langflow/base/memory/memory.py @@ -1,6 +1,6 @@ from typing import Optional -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema.schema import Record diff --git a/src/backend/base/langflow/base/tools/flow_tool.py b/src/backend/base/langflow/base/tools/flow_tool.py new file mode 100644 index 000000000..d0993bd99 --- /dev/null +++ b/src/backend/base/langflow/base/tools/flow_tool.py @@ -0,0 +1,117 @@ +from typing import Any, List, Optional, Type + 
+from asyncer import syncify +from langchain.tools import BaseTool +from langchain_core.runnables import RunnableConfig +from langchain_core.tools import ToolException +from pydantic.v1 import BaseModel + +from langflow.base.flow_processing.utils import build_records_from_result_data, format_flow_output_records +from langflow.graph.graph.base import Graph +from langflow.graph.vertex.base import Vertex +from langflow.helpers.flow import build_schema_from_inputs, get_arg_names, get_flow_inputs, run_flow + + +class FlowTool(BaseTool): + name: str + description: str + graph: Optional[Graph] = None + flow_id: Optional[str] = None + user_id: Optional[str] = None + inputs: List["Vertex"] = [] + get_final_results_only: bool = True + + @property + def args(self) -> dict: + schema = self.get_input_schema() + return schema.schema()["properties"] + + def get_input_schema(self, config: Optional[RunnableConfig] = None) -> Type[BaseModel]: + """The tool's input schema.""" + if self.args_schema is not None: + return self.args_schema + elif self.graph is not None: + return build_schema_from_inputs(self.name, get_flow_inputs(self.graph)) + else: + raise ToolException("No input schema available.") + + def _run( + self, + *args: Any, + **kwargs: Any, + ) -> str: + """Use the tool.""" + args_names = get_arg_names(self.inputs) + if len(args_names) == len(args): + kwargs = {arg["arg_name"]: arg_value for arg, arg_value in zip(args_names, args)} + elif len(args_names) != len(args) and len(args) != 0: + raise ToolException( + "Number of arguments does not match the number of inputs. Pass keyword arguments instead." 
+ ) + tweaks = {arg["component_name"]: kwargs[arg["arg_name"]] for arg in args_names} + + run_outputs = syncify(run_flow, raise_sync_error=False)( + tweaks={key: {"input_value": value} for key, value in tweaks.items()}, + flow_id=self.flow_id, + user_id=self.user_id, + ) + if not run_outputs: + return "No output" + run_output = run_outputs[0] + + records = [] + if run_output is not None: + for output in run_output.outputs: + if output: + records.extend( + build_records_from_result_data(output, get_final_results_only=self.get_final_results_only) + ) + return format_flow_output_records(records) + + def validate_inputs(self, args_names: List[dict[str, str]], args: Any, kwargs: Any): + """Validate the inputs.""" + + if len(args) > 0 and len(args) != len(args_names): + raise ToolException( + "Number of positional arguments does not match the number of inputs. Pass keyword arguments instead." + ) + + if len(args) == len(args_names): + kwargs = {arg_name["arg_name"]: arg_value for arg_name, arg_value in zip(args_names, args)} + + missing_args = [arg["arg_name"] for arg in args_names if arg["arg_name"] not in kwargs] + if missing_args: + raise ToolException(f"Missing required arguments: {', '.join(missing_args)}") + + return kwargs + + def build_tweaks_dict(self, args, kwargs): + args_names = get_arg_names(self.inputs) + kwargs = self.validate_inputs(args_names=args_names, args=args, kwargs=kwargs) + tweaks = {arg["component_name"]: kwargs[arg["arg_name"]] for arg in args_names} + return tweaks + + async def _arun( + self, + *args: Any, + **kwargs: Any, + ) -> str: + """Use the tool asynchronously.""" + tweaks = self.build_tweaks_dict(args, kwargs) + run_outputs = await run_flow( + tweaks={key: {"input_value": value} for key, value in tweaks.items()}, + flow_id=self.flow_id, + user_id=self.user_id, + ) + if not run_outputs: + return "No output" + run_output = run_outputs[0] + + records = [] + if run_output is not None: + for output in run_output.outputs: + if output: + 
records.extend( + build_records_from_result_data(output, get_final_results_only=self.get_final_results_only) + ) + return format_flow_output_records(records) diff --git a/src/backend/base/langflow/components/agents/AgentInitializer.py b/src/backend/base/langflow/components/agents/AgentInitializer.py deleted file mode 100644 index d1f09d5cf..000000000 --- a/src/backend/base/langflow/components/agents/AgentInitializer.py +++ /dev/null @@ -1,52 +0,0 @@ -from typing import Callable, List, Optional, Union - -from langchain.agents import AgentExecutor, AgentType, initialize_agent, types - -from langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool -from langflow.interface.custom.custom_component import CustomComponent - - -class AgentInitializerComponent(CustomComponent): - display_name: str = "Agent Initializer" - description: str = "Initialize a Langchain Agent." - documentation: str = "https://python.langchain.com/docs/modules/agents/agent_types/" - - def build_config(self): - agents = list(types.AGENT_TO_CLASS.keys()) - # field_type and required are optional - return { - "agent": {"options": agents, "value": agents[0], "display_name": "Agent Type"}, - "max_iterations": {"display_name": "Max Iterations", "value": 10}, - "memory": {"display_name": "Memory"}, - "tools": {"display_name": "Tools"}, - "llm": {"display_name": "Language Model"}, - "code": {"advanced": True}, - } - - def build( - self, - agent: str, - llm: BaseLanguageModel, - tools: List[Tool], - max_iterations: int, - memory: Optional[BaseChatMemory] = None, - ) -> Union[AgentExecutor, Callable]: - agent = AgentType(agent) - if memory: - return initialize_agent( - tools=tools, - llm=llm, - agent=agent, - memory=memory, - return_intermediate_steps=True, - handle_parsing_errors=True, - max_iterations=max_iterations, - ) - return initialize_agent( - tools=tools, - llm=llm, - agent=agent, - return_intermediate_steps=True, - handle_parsing_errors=True, - max_iterations=max_iterations, - ) diff 
--git a/src/backend/base/langflow/components/agents/JsonAgent.py b/src/backend/base/langflow/components/agents/JsonAgent.py index 51f20d71d..17826ef00 100644 --- a/src/backend/base/langflow/components/agents/JsonAgent.py +++ b/src/backend/base/langflow/components/agents/JsonAgent.py @@ -1,11 +1,9 @@ from langchain.agents import AgentExecutor +from langchain_community.agent_toolkits import create_json_agent from langchain_community.agent_toolkits.json.toolkit import JsonToolkit -from langflow.field_typing import ( - BaseLanguageModel, -) -from langflow.interface.custom.custom_component import CustomComponent -from langchain_community.agent_toolkits import create_json_agent +from langflow.custom import CustomComponent +from langflow.field_typing import BaseLanguageModel class JsonAgentComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py b/src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py deleted file mode 100644 index c4287569a..000000000 --- a/src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py +++ /dev/null @@ -1,101 +0,0 @@ -from typing import List, Optional - -from langchain.agents.agent import AgentExecutor -from langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import _get_default_system_message -from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent -from langchain.memory.token_buffer import ConversationTokenBufferMemory -from langchain_openai import ChatOpenAI - -from langflow.field_typing.range_spec import RangeSpec -from langflow.interface.custom.custom_component import CustomComponent -from pydantic.v1 import SecretStr -from langchain_core.memory import BaseMemory -from langchain_core.prompts import MessagesPlaceholder, SystemMessagePromptTemplate -from langchain_core.tools import Tool - - -class ConversationalAgent(CustomComponent): - display_name: str = "OpenAI Conversational Agent" - description: str = 
"Conversational Agent that can use OpenAI's function calling API" - icon = "OpenAI" - - def build_config(self): - openai_function_models = [ - "gpt-4-turbo-preview", - "gpt-4-0125-preview", - "gpt-4-1106-preview", - "gpt-4-vision-preview", - "gpt-3.5-turbo-0125", - "gpt-3.5-turbo-1106", - ] - return { - "tools": {"display_name": "Tools"}, - "memory": {"display_name": "Memory"}, - "system_message": {"display_name": "System Message"}, - "max_token_limit": {"display_name": "Max Token Limit"}, - "model_name": { - "display_name": "Model Name", - "options": openai_function_models, - "value": openai_function_models[0], - }, - "code": {"show": False}, - "temperature": { - "display_name": "Temperature", - "value": 0.2, - "rangeSpec": RangeSpec(min=0, max=2, step=0.1), - }, - } - - def build( - self, - model_name: str, - openai_api_key: str, - tools: List[Tool], - openai_api_base: Optional[str] = None, - memory: Optional[BaseMemory] = None, - system_message: Optional[SystemMessagePromptTemplate] = None, - max_token_limit: int = 2000, - temperature: float = 0.9, - ) -> AgentExecutor: - if openai_api_key: - api_key = SecretStr(openai_api_key) - else: - api_key = None - - llm = ChatOpenAI( - model=model_name, - api_key=api_key, - base_url=openai_api_base, - max_tokens=max_token_limit, - temperature=temperature, - ) - if not memory: - memory_key = "chat_history" - memory = ConversationTokenBufferMemory( - memory_key=memory_key, - return_messages=True, - output_key="output", - llm=llm, - max_token_limit=max_token_limit, - ) - else: - memory_key = memory.memory_key # type: ignore - - _system_message = system_message or _get_default_system_message() - prompt = OpenAIFunctionsAgent.create_prompt( - system_message=_system_message, # type: ignore - extra_prompt_messages=[MessagesPlaceholder(variable_name=memory_key)], - ) - agent = OpenAIFunctionsAgent( - llm=llm, - tools=tools, - prompt=prompt, # type: ignore - ) - return AgentExecutor( - agent=agent, - tools=tools, # type: ignore - 
memory=memory, - verbose=True, - return_intermediate_steps=True, - handle_parsing_errors=True, - ) diff --git a/src/backend/base/langflow/components/agents/SQLAgent.py b/src/backend/base/langflow/components/agents/SQLAgent.py index abde4ab94..cd6b03f94 100644 --- a/src/backend/base/langflow/components/agents/SQLAgent.py +++ b/src/backend/base/langflow/components/agents/SQLAgent.py @@ -1,12 +1,12 @@ from typing import Callable, Union from langchain.agents import AgentExecutor -from langchain_community.utilities import SQLDatabase from langchain_community.agent_toolkits import SQLDatabaseToolkit from langchain_community.agent_toolkits.sql.base import create_sql_agent +from langchain_community.utilities import SQLDatabase +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel -from langflow.interface.custom.custom_component import CustomComponent class SQLAgentComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/agents/VectorStoreAgent.py b/src/backend/base/langflow/components/agents/VectorStoreAgent.py index 095f9da41..3cba51a09 100644 --- a/src/backend/base/langflow/components/agents/VectorStoreAgent.py +++ b/src/backend/base/langflow/components/agents/VectorStoreAgent.py @@ -3,8 +3,8 @@ from typing import Callable, Union from langchain.agents import AgentExecutor, create_vectorstore_agent from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel -from langflow.interface.custom.custom_component import CustomComponent class VectorStoreAgentComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py b/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py index 514a9767c..e483f0d2c 100644 --- a/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py +++ 
b/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py @@ -4,7 +4,7 @@ from langchain.agents import create_vectorstore_router_agent from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit from langchain_core.language_models.base import BaseLanguageModel -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class VectorStoreRouterAgentComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/agents/__init__.py b/src/backend/base/langflow/components/agents/__init__.py index f2a118b58..8bd64bab0 100644 --- a/src/backend/base/langflow/components/agents/__init__.py +++ b/src/backend/base/langflow/components/agents/__init__.py @@ -1,17 +1,13 @@ -from .AgentInitializer import AgentInitializerComponent from .CSVAgent import CSVAgentComponent from .JsonAgent import JsonAgentComponent -from .OpenAIConversationalAgent import ConversationalAgent from .SQLAgent import SQLAgentComponent from .VectorStoreAgent import VectorStoreAgentComponent from .VectorStoreRouterAgent import VectorStoreRouterAgentComponent from .XMLAgent import XMLAgentComponent __all__ = [ - "AgentInitializerComponent", "CSVAgentComponent", "JsonAgentComponent", - "ConversationalAgent", "SQLAgentComponent", "VectorStoreAgentComponent", "VectorStoreRouterAgentComponent", diff --git a/src/backend/base/langflow/components/chains/ConversationChain.py b/src/backend/base/langflow/components/chains/ConversationChain.py index 2b8dd09a3..0801f4623 100644 --- a/src/backend/base/langflow/components/chains/ConversationChain.py +++ b/src/backend/base/langflow/components/chains/ConversationChain.py @@ -2,8 +2,8 @@ from typing import Optional from langchain.chains import ConversationChain +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, BaseMemory, Text -from langflow.interface.custom.custom_component import CustomComponent class 
ConversationChainComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/chains/LLMChain.py b/src/backend/base/langflow/components/chains/LLMChain.py index 9fba051db..0387b50f3 100644 --- a/src/backend/base/langflow/components/chains/LLMChain.py +++ b/src/backend/base/langflow/components/chains/LLMChain.py @@ -1,11 +1,11 @@ from typing import Optional from langchain.chains.llm import LLMChain - -from langflow.field_typing import BaseLanguageModel, BaseMemory, Text -from langflow.interface.custom.custom_component import CustomComponent from langchain_core.prompts import PromptTemplate +from langflow.custom import CustomComponent +from langflow.field_typing import BaseLanguageModel, BaseMemory, Text + class LLMChainComponent(CustomComponent): display_name = "LLMChain" diff --git a/src/backend/base/langflow/components/chains/LLMCheckerChain.py b/src/backend/base/langflow/components/chains/LLMCheckerChain.py index 04e6fe67d..f413081b1 100644 --- a/src/backend/base/langflow/components/chains/LLMCheckerChain.py +++ b/src/backend/base/langflow/components/chains/LLMCheckerChain.py @@ -1,7 +1,7 @@ from langchain.chains import LLMCheckerChain +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, Text -from langflow.interface.custom.custom_component import CustomComponent class LLMCheckerChainComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/chains/LLMMathChain.py b/src/backend/base/langflow/components/chains/LLMMathChain.py index 0b7374da6..2bb573ef5 100644 --- a/src/backend/base/langflow/components/chains/LLMMathChain.py +++ b/src/backend/base/langflow/components/chains/LLMMathChain.py @@ -2,8 +2,8 @@ from typing import Optional from langchain.chains import LLMChain, LLMMathChain +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, BaseMemory, Text -from langflow.interface.custom.custom_component import CustomComponent class 
LLMMathChainComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/chains/RetrievalQA.py b/src/backend/base/langflow/components/chains/RetrievalQA.py index 7b6ba49e4..da77f89d4 100644 --- a/src/backend/base/langflow/components/chains/RetrievalQA.py +++ b/src/backend/base/langflow/components/chains/RetrievalQA.py @@ -3,8 +3,8 @@ from typing import Optional from langchain.chains.retrieval_qa.base import RetrievalQA from langchain_core.documents import Document +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text -from langflow.interface.custom.custom_component import CustomComponent from langflow.schema.schema import Record diff --git a/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py index 75a9131f5..2e0fa4ced 100644 --- a/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py +++ b/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py @@ -3,8 +3,8 @@ from typing import Optional from langchain.chains import RetrievalQAWithSourcesChain from langchain_core.documents import Document +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text -from langflow.interface.custom.custom_component import CustomComponent class RetrievalQAWithSourcesChainComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/chains/SQLGenerator.py b/src/backend/base/langflow/components/chains/SQLGenerator.py index 3b111ba71..a6ff0ee2f 100644 --- a/src/backend/base/langflow/components/chains/SQLGenerator.py +++ b/src/backend/base/langflow/components/chains/SQLGenerator.py @@ -5,8 +5,8 @@ from langchain_community.utilities.sql_database import SQLDatabase from langchain_core.prompts import PromptTemplate from langchain_core.runnables import Runnable +from 
langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, Text -from langflow.interface.custom.custom_component import CustomComponent class SQLGeneratorComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/data/APIRequest.py b/src/backend/base/langflow/components/data/APIRequest.py index 9f1ca703c..2065f90c7 100644 --- a/src/backend/base/langflow/components/data/APIRequest.py +++ b/src/backend/base/langflow/components/data/APIRequest.py @@ -1,10 +1,15 @@ import asyncio import json -from typing import List, Optional +from typing import Any, List, Optional import httpx -from langflow.interface.custom.custom_component import CustomComponent +from loguru import logger + +from langflow.base.curl.parse import parse_context +from langflow.custom import CustomComponent +from langflow.field_typing import NestedDict from langflow.schema import Record +from langflow.schema.dotdict import dotdict class APIRequest(CustomComponent): @@ -16,10 +21,15 @@ class APIRequest(CustomComponent): field_config = { "urls": {"display_name": "URLs", "info": "URLs to make requests to."}, + "curl": { + "display_name": "Curl", + "info": "Paste a curl command to populate the fields.", + "refresh_button": True, + "refresh_button_text": "", + }, "method": { "display_name": "Method", "info": "The HTTP method to use.", - "field_type": "str", "options": ["GET", "POST", "PATCH", "PUT"], "value": "GET", }, @@ -35,12 +45,33 @@ class APIRequest(CustomComponent): }, "timeout": { "display_name": "Timeout", - "field_type": "int", "info": "The timeout to use for the request.", "value": 5, }, } + def parse_curl(self, curl: str, build_config: dotdict) -> dotdict: + try: + parsed = parse_context(curl) + build_config["urls"]["value"] = [parsed.url] + build_config["method"]["value"] = parsed.method.upper() + build_config["headers"]["value"] = dict(parsed.headers) + + try: + json_data = json.loads(parsed.data) + build_config["body"]["value"] = json_data + 
except json.JSONDecodeError as e: + print(e) + except Exception as exc: + logger.error(f"Error parsing curl: {exc}") + raise ValueError(f"Error parsing curl: {exc}") + return build_config + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "curl" and field_value is not None: + build_config = self.parse_curl(field_value, build_config) + return build_config + async def make_request( self, client: httpx.AsyncClient, @@ -93,21 +124,25 @@ class APIRequest(CustomComponent): self, method: str, urls: List[str], - headers: Optional[Record] = None, - body: Optional[Record] = None, + curl: Optional[str] = None, + headers: Optional[NestedDict] = {}, + body: Optional[NestedDict] = {}, timeout: int = 5, ) -> List[Record]: if headers is None: headers_dict = {} - else: + elif isinstance(headers, Record): headers_dict = headers.data + else: + headers_dict = headers bodies = [] if body: - if isinstance(body, list): - bodies = [b.data for b in body] + if not isinstance(body, list): + bodies = [body] else: - bodies = [body.data] + bodies = body + bodies = [b.data if isinstance(b, Record) else b for b in bodies] # type: ignore if len(urls) != len(bodies): # add bodies with None diff --git a/src/backend/base/langflow/components/data/Directory.py b/src/backend/base/langflow/components/data/Directory.py index 87dc99287..4dfa51de3 100644 --- a/src/backend/base/langflow/components/data/Directory.py +++ b/src/backend/base/langflow/components/data/Directory.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional from langflow.base.data.utils import parallel_load_records, parse_text_file_to_record, retrieve_file_paths -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/data/File.py b/src/backend/base/langflow/components/data/File.py index 70fe1dccc..5ebb94cff 100644 
--- a/src/backend/base/langflow/components/data/File.py +++ b/src/backend/base/langflow/components/data/File.py @@ -2,7 +2,7 @@ from pathlib import Path from typing import Any, Dict from langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/data/URL.py b/src/backend/base/langflow/components/data/URL.py index 2b286e126..f9e515205 100644 --- a/src/backend/base/langflow/components/data/URL.py +++ b/src/backend/base/langflow/components/data/URL.py @@ -2,7 +2,7 @@ from typing import Any, Dict from langchain_community.document_loaders.web_base import WebBaseLoader -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/data/Webhook.py b/src/backend/base/langflow/components/data/Webhook.py new file mode 100644 index 000000000..cf82e07d2 --- /dev/null +++ b/src/backend/base/langflow/components/data/Webhook.py @@ -0,0 +1,39 @@ +import json +import uuid +from typing import Any, Optional + +from langflow.custom import CustomComponent +from langflow.schema.dotdict import dotdict +from langflow.schema.schema import Record + + +class WebhookComponent(CustomComponent): + display_name = "Webhook Input" + description = "Defines a webhook input for the flow." 
+ + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "webhook_id": + build_config["webhook_id"]["value"] = uuid.uuid4().hex + return build_config + + def build_config(self): + return { + "data": { + "display_name": "Data", + "info": "Use this field to quickly test the webhook component by providing a JSON payload.", + "multiline": True, + } + } + + def build(self, data: Optional[str] = "") -> Record: + message = "" + try: + body = json.loads(data or "{}") + except json.JSONDecodeError: + body = {"payload": data} + message = f"Invalid JSON payload. Please check the format.\n\n{data}" + record = Record(data=body) + if not message: + message = json.dumps(body, indent=2) + self.status = message + return record diff --git a/src/backend/base/langflow/components/data/__init__.py b/src/backend/base/langflow/components/data/__init__.py index ca82e3eb8..c57cf8656 100644 --- a/src/backend/base/langflow/components/data/__init__.py +++ b/src/backend/base/langflow/components/data/__init__.py @@ -1,7 +1,8 @@ from .APIRequest import APIRequest from .Directory import DirectoryComponent from .File import FileComponent +from .Webhook import WebhookComponent from .URL import URLComponent -__all__ = ["APIRequest", "DirectoryComponent", "FileComponent", "URLComponent"] +__all__ = ["APIRequest", "DirectoryComponent", "FileComponent", "URLComponent", "WebhookComponent"] diff --git a/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py index c8cf2a96b..e43c144b1 100644 --- a/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py @@ -1,9 +1,10 @@ from typing import Optional -from langchain_community.embeddings import BedrockEmbeddings -from langflow.interface.custom.custom_component import CustomComponent +from 
langchain_community.embeddings import BedrockEmbeddings from langchain_core.embeddings import Embeddings +from langflow.custom import CustomComponent + class AmazonBedrockEmeddingsComponent(CustomComponent): display_name: str = "Amazon Bedrock Embeddings" diff --git a/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py index 5e02890ff..dd40d64d5 100644 --- a/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py @@ -1,8 +1,9 @@ -from langflow.interface.custom.custom_component import CustomComponent from langchain_core.embeddings import Embeddings from langchain_openai import AzureOpenAIEmbeddings from pydantic.v1 import SecretStr +from langflow.custom import CustomComponent + class AzureOpenAIEmbeddingsComponent(CustomComponent): display_name: str = "Azure OpenAI Embeddings" diff --git a/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py index 849cd9bba..720dfa97f 100644 --- a/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py @@ -2,7 +2,7 @@ from typing import Dict, Optional from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class HuggingFaceEmbeddingsComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py index 37a08ccc6..503a6a25a 100644 --- a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py +++ 
b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py @@ -3,7 +3,7 @@ from typing import Dict, Optional from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings from pydantic.v1.types import SecretStr -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class HuggingFaceInferenceAPIEmbeddingsComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py index 23d80132f..d24c9fb30 100644 --- a/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py @@ -1,7 +1,7 @@ +from langchain_mistralai.embeddings import MistralAIEmbeddings from pydantic.v1 import SecretStr -from langchain_mistralai.embeddings import MistralAIEmbeddings -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.field_typing import Embeddings diff --git a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py index 575df2d3f..8aad24735 100644 --- a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py @@ -1,9 +1,10 @@ from typing import Optional -from langchain_community.embeddings import OllamaEmbeddings -from langflow.interface.custom.custom_component import CustomComponent +from langchain_community.embeddings import OllamaEmbeddings from langchain_core.embeddings import Embeddings +from langflow.custom import CustomComponent + class OllamaEmbeddingsComponent(CustomComponent): display_name: str = "Ollama Embeddings" diff --git a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py 
b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py index 0b8959101..2ff78d562 100644 --- a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py @@ -1,10 +1,10 @@ -from typing import Any, Dict, List, Optional +from typing import Dict, List, Optional from langchain_openai.embeddings.base import OpenAIEmbeddings from pydantic.v1 import SecretStr +from langflow.custom import CustomComponent from langflow.field_typing import Embeddings, NestedDict -from langflow.interface.custom.custom_component import CustomComponent class OpenAIEmbeddingsComponent(CustomComponent): @@ -94,7 +94,6 @@ class OpenAIEmbeddingsComponent(CustomComponent): allowed_special: List[str] = [], disallowed_special: List[str] = ["all"], chunk_size: int = 1000, - client: Optional[Any] = None, deployment: str = "text-embedding-ada-002", embedding_ctx_length: int = 8191, max_retries: int = 6, @@ -126,7 +125,6 @@ class OpenAIEmbeddingsComponent(CustomComponent): allowed_special=set(allowed_special), disallowed_special="all", chunk_size=chunk_size, - client=client, deployment=deployment, embedding_ctx_length=embedding_ctx_length, max_retries=max_retries, diff --git a/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py index 4bfaa0003..c0d249326 100644 --- a/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py @@ -2,7 +2,7 @@ from typing import List, Optional from langchain_google_vertexai import VertexAIEmbeddings -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class VertexAIEmbeddingsComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/experimental/ClearMessageHistory.py 
b/src/backend/base/langflow/components/experimental/ClearMessageHistory.py index 3c62b7ea3..dacfaccb4 100644 --- a/src/backend/base/langflow/components/experimental/ClearMessageHistory.py +++ b/src/backend/base/langflow/components/experimental/ClearMessageHistory.py @@ -1,4 +1,4 @@ -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.memory import delete_messages, get_messages diff --git a/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py b/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py index 5d816f112..b1d6ecd40 100644 --- a/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py +++ b/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py @@ -1,4 +1,4 @@ -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/experimental/FlowTool.py b/src/backend/base/langflow/components/experimental/FlowTool.py index 07f3b0e38..fa81f6351 100644 --- a/src/backend/base/langflow/components/experimental/FlowTool.py +++ b/src/backend/base/langflow/components/experimental/FlowTool.py @@ -1,14 +1,14 @@ from typing import Any, List, Optional -from asyncer import syncify -from langchain_core.tools import StructuredTool +from loguru import logger + +from langflow.base.tools.flow_tool import FlowTool from langflow.custom import CustomComponent from langflow.field_typing import Tool from langflow.graph.graph.base import Graph -from langflow.helpers.flow import build_function_and_schema +from langflow.helpers.flow import get_flow_inputs from langflow.schema.dotdict import dotdict from langflow.schema.schema import Record -from loguru import logger class FlowToolComponent(CustomComponent): @@ -68,18 +68,20 @@ class FlowToolComponent(CustomComponent): } async def build(self, 
flow_name: str, name: str, description: str, return_direct: bool = False) -> Tool: + FlowTool.update_forward_refs() flow_record = self.get_flow(flow_name) if not flow_record: raise ValueError("Flow not found.") graph = Graph.from_payload(flow_record.data["data"]) - dynamic_flow_function, schema = build_function_and_schema(flow_record, graph) - tool = StructuredTool.from_function( - func=syncify(dynamic_flow_function, raise_sync_error=False), # type: ignore - coroutine=dynamic_flow_function, + inputs = get_flow_inputs(graph) + tool = FlowTool( name=name, description=description, + graph=graph, return_direct=return_direct, - args_schema=schema, + inputs=inputs, + flow_id=str(flow_record.id), + user_id=str(self._user_id), ) description_repr = repr(tool.description).strip("'") args_str = "\n".join([f"- {arg_name}: {arg_data['description']}" for arg_name, arg_data in tool.args.items()]) diff --git a/src/backend/base/langflow/components/experimental/ListFlows.py b/src/backend/base/langflow/components/experimental/ListFlows.py index c7b421d15..07b4a4bbc 100644 --- a/src/backend/base/langflow/components/experimental/ListFlows.py +++ b/src/backend/base/langflow/components/experimental/ListFlows.py @@ -1,6 +1,6 @@ from typing import List -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/experimental/Listen.py b/src/backend/base/langflow/components/experimental/Listen.py index cab979f70..be7ddb8e3 100644 --- a/src/backend/base/langflow/components/experimental/Listen.py +++ b/src/backend/base/langflow/components/experimental/Listen.py @@ -1,4 +1,4 @@ -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/experimental/MergeRecords.py 
b/src/backend/base/langflow/components/experimental/MergeRecords.py index 60e5ffe20..c938b4473 100644 --- a/src/backend/base/langflow/components/experimental/MergeRecords.py +++ b/src/backend/base/langflow/components/experimental/MergeRecords.py @@ -1,4 +1,4 @@ -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/experimental/Notify.py b/src/backend/base/langflow/components/experimental/Notify.py index 9af7f8ec6..bf4391682 100644 --- a/src/backend/base/langflow/components/experimental/Notify.py +++ b/src/backend/base/langflow/components/experimental/Notify.py @@ -1,6 +1,6 @@ from typing import Optional -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/experimental/Pass.py b/src/backend/base/langflow/components/experimental/Pass.py index 9528f0591..3fdb438a0 100644 --- a/src/backend/base/langflow/components/experimental/Pass.py +++ b/src/backend/base/langflow/components/experimental/Pass.py @@ -1,7 +1,8 @@ from typing import Union -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema import Record + +from langflow.custom import CustomComponent from langflow.field_typing import Text +from langflow.schema import Record class PassComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/experimental/PythonFunction.py b/src/backend/base/langflow/components/experimental/PythonFunction.py index 28e902abe..d832e2f5c 100644 --- a/src/backend/base/langflow/components/experimental/PythonFunction.py +++ b/src/backend/base/langflow/components/experimental/PythonFunction.py @@ -1,8 +1,8 @@ from typing import Callable +from langflow.custom import CustomComponent +from langflow.custom.utils import get_function from 
langflow.field_typing import Code -from langflow.interface.custom.custom_component import CustomComponent -from langflow.interface.custom.utils import get_function class PythonFunctionComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/experimental/RunFlow.py b/src/backend/base/langflow/components/experimental/RunFlow.py index d3769de7a..d2e7dd285 100644 --- a/src/backend/base/langflow/components/experimental/RunFlow.py +++ b/src/backend/base/langflow/components/experimental/RunFlow.py @@ -1,8 +1,9 @@ from typing import Any, List, Optional +from langflow.base.flow_processing.utils import build_records_from_run_outputs from langflow.custom import CustomComponent from langflow.field_typing import NestedDict, Text -from langflow.graph.schema import ResultData +from langflow.graph.schema import RunOutputs from langflow.schema import Record, dotdict @@ -39,28 +40,17 @@ class RunFlowComponent(CustomComponent): }, } - def build_records_from_result_data(self, result_data: ResultData) -> List[Record]: - messages = result_data.messages - if not messages: - return [] - records = [] - for message in messages: - message_dict = message if isinstance(message, dict) else message.model_dump() - record = Record(text=message_dict.get("text", ""), data={"result": result_data}) - records.append(record) - return records - async def build(self, input_value: Text, flow_name: str, tweaks: NestedDict) -> List[Record]: - results: List[Optional[ResultData]] = await self.run_flow( + results: List[Optional[RunOutputs]] = await self.run_flow( inputs={"input_value": input_value}, flow_name=flow_name, tweaks=tweaks ) if isinstance(results, list): records = [] for result in results: if result: - records.extend(self.build_records_from_result_data(result)) + records.extend(build_records_from_run_outputs(result)) else: - records = self.build_records_from_result_data(results) + records = build_records_from_run_outputs()(results) self.status = records return records diff --git 
a/src/backend/base/langflow/components/experimental/RunnableExecutor.py b/src/backend/base/langflow/components/experimental/RunnableExecutor.py index 0a3593a66..82260b76b 100644 --- a/src/backend/base/langflow/components/experimental/RunnableExecutor.py +++ b/src/backend/base/langflow/components/experimental/RunnableExecutor.py @@ -1,7 +1,7 @@ from langchain_core.runnables import Runnable +from langflow.custom import CustomComponent from langflow.field_typing import Text -from langflow.interface.custom.custom_component import CustomComponent class RunnableExecComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/experimental/SQLExecutor.py b/src/backend/base/langflow/components/experimental/SQLExecutor.py index a62ac187b..e03314514 100644 --- a/src/backend/base/langflow/components/experimental/SQLExecutor.py +++ b/src/backend/base/langflow/components/experimental/SQLExecutor.py @@ -1,8 +1,8 @@ from langchain_community.tools.sql_database.tool import QuerySQLDataBaseTool from langchain_community.utilities import SQLDatabase +from langflow.custom import CustomComponent from langflow.field_typing import Text -from langflow.interface.custom.custom_component import CustomComponent class SQLExecutorComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/experimental/SplitText.py b/src/backend/base/langflow/components/experimental/SplitText.py index bd2bc921f..7156371c3 100644 --- a/src/backend/base/langflow/components/experimental/SplitText.py +++ b/src/backend/base/langflow/components/experimental/SplitText.py @@ -1,7 +1,7 @@ from typing import Optional +from langflow.custom import CustomComponent from langflow.field_typing import Text -from langflow.interface.custom.custom_component import CustomComponent from langflow.schema import Record from langflow.utils.util import unescape_string @@ -43,7 +43,7 @@ class SplitTextComponent(CustomComponent): chunks = [chunk[:truncate_size] for chunk in chunks] for chunk in chunks: - 
outputs.append(Record(text=chunk, data={"parent": text})) + outputs.append(Record(data={"parent": text, "text": chunk})) self.status = outputs return outputs diff --git a/src/backend/base/langflow/components/experimental/StoreMessage.py b/src/backend/base/langflow/components/experimental/StoreMessage.py index 9f3aa60e2..761646188 100644 --- a/src/backend/base/langflow/components/experimental/StoreMessage.py +++ b/src/backend/base/langflow/components/experimental/StoreMessage.py @@ -1,6 +1,6 @@ from typing import List, Optional -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.memory import get_messages, store_message from langflow.schema import Record diff --git a/src/backend/base/langflow/components/experimental/SubFlow.py b/src/backend/base/langflow/components/experimental/SubFlow.py index 80e15c6ad..76a9538a4 100644 --- a/src/backend/base/langflow/components/experimental/SubFlow.py +++ b/src/backend/base/langflow/components/experimental/SubFlow.py @@ -2,9 +2,10 @@ from typing import Any, List, Optional from loguru import logger +from langflow.base.flow_processing.utils import build_records_from_result_data from langflow.custom import CustomComponent from langflow.graph.graph.base import Graph -from langflow.graph.schema import ResultData, RunOutputs +from langflow.graph.schema import RunOutputs from langflow.graph.vertex.base import Vertex from langflow.helpers.flow import get_flow_inputs from langflow.schema import Record @@ -92,21 +93,6 @@ class SubFlowComponent(CustomComponent): }, } - def build_records_from_result_data(self, result_data: ResultData, get_final_results_only: bool) -> List[Record]: - messages = result_data.messages - if not messages: - return [] - records = [] - for message in messages: - message_dict = message if isinstance(message, dict) else message.model_dump() - if get_final_results_only: - result_data_dict = result_data.model_dump() - results = 
result_data_dict.get("results", {}) - inner_result = results.get("result", {}) - record = Record(data={"result": inner_result, "message": message_dict}, text_key="result") - records.append(record) - return records - async def build(self, flow_name: str, get_final_results_only: bool = True, **kwargs) -> List[Record]: tweaks = {key: {"input_value": value} for key, value in kwargs.items()} run_outputs: List[Optional[RunOutputs]] = await self.run_flow( @@ -121,7 +107,7 @@ class SubFlowComponent(CustomComponent): if run_output is not None: for output in run_output.outputs: if output: - records.extend(self.build_records_from_result_data(output, get_final_results_only)) + records.extend(build_records_from_result_data(output, get_final_results_only)) self.status = records logger.debug(records) diff --git a/src/backend/base/langflow/components/experimental/TextOperator.py b/src/backend/base/langflow/components/experimental/TextOperator.py index 89fba13b6..ea79e92e7 100644 --- a/src/backend/base/langflow/components/experimental/TextOperator.py +++ b/src/backend/base/langflow/components/experimental/TextOperator.py @@ -1,8 +1,8 @@ from typing import Optional, Union -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema import Record +from langflow.custom import CustomComponent from langflow.field_typing import Text +from langflow.schema import Record class TextOperatorComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/helpers/CombineText.py b/src/backend/base/langflow/components/helpers/CombineText.py index fcd23c188..bedc4293d 100644 --- a/src/backend/base/langflow/components/helpers/CombineText.py +++ b/src/backend/base/langflow/components/helpers/CombineText.py @@ -1,4 +1,4 @@ -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.field_typing import Text diff --git 
a/src/backend/base/langflow/components/helpers/CombineTextsUnsorted.py b/src/backend/base/langflow/components/helpers/CombineTextsUnsorted.py index 20cd4db29..67d315739 100644 --- a/src/backend/base/langflow/components/helpers/CombineTextsUnsorted.py +++ b/src/backend/base/langflow/components/helpers/CombineTextsUnsorted.py @@ -1,4 +1,4 @@ -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.field_typing import Text diff --git a/src/backend/base/langflow/components/helpers/CustomComponent.py b/src/backend/base/langflow/components/helpers/CustomComponent.py index ce3d8c62c..7313323a9 100644 --- a/src/backend/base/langflow/components/helpers/CustomComponent.py +++ b/src/backend/base/langflow/components/helpers/CustomComponent.py @@ -1,6 +1,6 @@ # from langflow.field_typing import Data +from langflow.custom import CustomComponent from langflow.schema import Record -from langflow.interface.custom.custom_component import CustomComponent class Component(CustomComponent): diff --git a/src/backend/base/langflow/components/helpers/DocumentToRecord.py b/src/backend/base/langflow/components/helpers/DocumentToRecord.py index 362c0a9c1..5adaf7ab4 100644 --- a/src/backend/base/langflow/components/helpers/DocumentToRecord.py +++ b/src/backend/base/langflow/components/helpers/DocumentToRecord.py @@ -2,7 +2,7 @@ from typing import List from langchain_core.documents import Document -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/helpers/IDGenerator.py b/src/backend/base/langflow/components/helpers/IDGenerator.py index d10574eef..1e4e223b1 100644 --- a/src/backend/base/langflow/components/helpers/IDGenerator.py +++ b/src/backend/base/langflow/components/helpers/IDGenerator.py @@ -1,7 +1,7 @@ import uuid from typing import Any, Optional -from 
langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class UUIDGeneratorComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/helpers/MessageHistory.py b/src/backend/base/langflow/components/helpers/MessageHistory.py index 0f208e6eb..221d90c4e 100644 --- a/src/backend/base/langflow/components/helpers/MessageHistory.py +++ b/src/backend/base/langflow/components/helpers/MessageHistory.py @@ -1,6 +1,6 @@ from typing import List, Optional -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.memory import get_messages from langflow.schema import Record diff --git a/src/backend/base/langflow/components/helpers/RecordsToText.py b/src/backend/base/langflow/components/helpers/RecordsToText.py index 8f4fed311..d3e418792 100644 --- a/src/backend/base/langflow/components/helpers/RecordsToText.py +++ b/src/backend/base/langflow/components/helpers/RecordsToText.py @@ -1,6 +1,6 @@ +from langflow.custom import CustomComponent from langflow.field_typing import Text from langflow.helpers.record import records_to_text -from langflow.interface.custom.custom_component import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/helpers/UpdateRecord.py b/src/backend/base/langflow/components/helpers/UpdateRecord.py index 9f165e146..e3153d6d7 100644 --- a/src/backend/base/langflow/components/helpers/UpdateRecord.py +++ b/src/backend/base/langflow/components/helpers/UpdateRecord.py @@ -1,4 +1,4 @@ -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/inputs/Prompt.py b/src/backend/base/langflow/components/inputs/Prompt.py index 3b44a6d12..2c76e6132 100644 --- a/src/backend/base/langflow/components/inputs/Prompt.py +++ 
b/src/backend/base/langflow/components/inputs/Prompt.py @@ -1,7 +1,7 @@ from langchain_core.prompts import PromptTemplate +from langflow.custom import CustomComponent from langflow.field_typing import Prompt, TemplateField, Text -from langflow.interface.custom.custom_component import CustomComponent class PromptComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py index 23eebee10..848d10985 100644 --- a/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py @@ -3,7 +3,7 @@ # We need to make sure this class is importable from the context where this code will be running. from langchain_community.utilities.bing_search import BingSearchAPIWrapper -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class BingSearchAPIWrapperComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py index 3f2f67faf..5e45219cc 100644 --- a/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py @@ -2,7 +2,7 @@ from typing import Callable, Union from langchain_community.utilities.google_search import GoogleSearchAPIWrapper -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class GoogleSearchAPIWrapperComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py index e70e85cf4..2b9a49458 100644 
--- a/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py @@ -4,7 +4,7 @@ from typing import Dict # If this class does not exist, you would need to create it or import the appropriate class from another module from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class GoogleSerperAPIWrapperComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py index df783470a..c0300cff0 100644 --- a/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py +++ b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py @@ -13,7 +13,7 @@ from langchain_core.documents import Document -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.services.database.models.base import orjson_dumps diff --git a/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py b/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py index 7bbbdb870..93c46087d 100644 --- a/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py +++ b/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py @@ -1,6 +1,6 @@ from langchain_experimental.sql.base import SQLDatabase -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class SQLDatabaseComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py b/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py index d255f34b7..4fe0706f8 100644 
--- a/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py @@ -2,7 +2,7 @@ from typing import Dict, Optional from langchain_community.utilities.searx_search import SearxSearchWrapper -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class SearxSearchWrapperComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py index c64a26e79..d8aa404cb 100644 --- a/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py @@ -2,7 +2,7 @@ from typing import Callable, Union from langchain_community.utilities.serpapi import SerpAPIWrapper -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class SerpAPIWrapperComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py index 144792315..1c10dd4bd 100644 --- a/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py @@ -2,7 +2,7 @@ from typing import Callable, Union from langchain_community.utilities.wikipedia import WikipediaAPIWrapper -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent # Assuming WikipediaAPIWrapper is a class that needs to be imported. 
# The import statement is not included as it is not provided in the JSON diff --git a/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py index bc224e83c..42be1f199 100644 --- a/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py @@ -2,7 +2,7 @@ from typing import Callable, Union from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent # Since all the fields in the JSON have show=False, we will only create a basic component # without any configurable fields. diff --git a/src/backend/base/langflow/components/memories/ZepMessageReader.py b/src/backend/base/langflow/components/memories/ZepMessageReader.py index bac6e9f1a..75b27091f 100644 --- a/src/backend/base/langflow/components/memories/ZepMessageReader.py +++ b/src/backend/base/langflow/components/memories/ZepMessageReader.py @@ -116,19 +116,18 @@ class ZepMessageReaderComponent(BaseMemoryComponent): url: Optional[Text] = None, api_key: Optional[Text] = None, query: Optional[Text] = None, - search_scope: SearchScope = SearchScope.messages, - search_type: SearchType = SearchType.similarity, + search_scope: str = SearchScope.messages, + search_type: str = SearchType.similarity, limit: Optional[int] = None, ) -> list[Record]: try: - from zep_python import ZepClient - from zep_python.langchain import ZepChatMessageHistory - # Monkeypatch API_BASE_PATH to # avoid 404 # This is a workaround for the local Zep instance # cloud Zep works with v2 import zep_python.zep_client + from zep_python import ZepClient + from zep_python.langchain import ZepChatMessageHistory zep_python.zep_client.API_BASE_PATH = api_base_path except ImportError: diff --git 
a/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py index ff36820f5..0e27e620f 100644 --- a/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py +++ b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py @@ -1,8 +1,9 @@ from typing import Optional -from langflow.field_typing import BaseLanguageModel + from langchain_community.llms.bedrock import Bedrock -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent +from langflow.field_typing import BaseLanguageModel class AmazonBedrockComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py index d83ad23a0..786d558bb 100644 --- a/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py +++ b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py @@ -1,9 +1,10 @@ from typing import Optional + from langchain_anthropic import ChatAnthropic +from langchain_core.language_models import BaseLanguageModel from pydantic.v1 import SecretStr -from langflow.interface.custom.custom_component import CustomComponent -from langchain_core.language_models import BaseLanguageModel +from langflow.custom import CustomComponent class ChatAntropicSpecsComponent(CustomComponent): @@ -34,8 +35,8 @@ class ChatAntropicSpecsComponent(CustomComponent): }, "max_tokens": { "display_name": "Max Tokens", - "field_type": "int", - "value": 256, + "advanced": True, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", }, "temperature": { "display_name": "Temperature", diff --git a/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py b/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py index c0fd2b779..947a1e2a3 100644 --- a/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py +++ b/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py @@ -1,10 +1,11 @@ from typing import Optional -from langflow.interface.custom.custom_component import CustomComponent from langchain_core.language_models import BaseLanguageModel from langchain_openai import AzureChatOpenAI from pydantic.v1 import SecretStr +from langflow.custom import CustomComponent + class AzureChatOpenAISpecsComponent(CustomComponent): display_name: str = "AzureChatOpenAI" @@ -65,11 +66,8 @@ class AzureChatOpenAISpecsComponent(CustomComponent): }, "max_tokens": { "display_name": "Max Tokens", - "value": 1000, - "required": False, - "field_type": "int", "advanced": True, - "info": "Maximum number of tokens to generate.", + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", }, "code": {"show": False}, } @@ -96,7 +94,7 @@ class AzureChatOpenAISpecsComponent(CustomComponent): api_version=api_version, api_key=azure_api_key, temperature=temperature, - max_tokens=max_tokens, + max_tokens=max_tokens or None, ) except Exception as e: raise ValueError("Could not connect to AzureOpenAI API.") from e diff --git a/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py index a60fb9a64..a353410ad 100644 --- a/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py +++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py @@ -1,10 +1,11 @@ from typing import Optional from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint + from pydantic.v1 import SecretStr +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel -from langflow.interface.custom.custom_component import CustomComponent class QianfanChatEndpointComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py index cc4ba3b9f..273bb5d98 100644 --- a/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py +++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py @@ -2,7 +2,7 @@ from typing import Optional from langchain_community.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel diff --git a/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py index 
b03a9b737..7e4000d9b 100644 --- a/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py @@ -46,9 +46,8 @@ class AnthropicLLM(CustomComponent): }, "max_tokens": { "display_name": "Max Tokens", - "field_type": "int", "advanced": True, - "value": 256, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", }, "temperature": { "display_name": "Temperature", diff --git a/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py index 840682f4d..b3bce849e 100644 --- a/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py @@ -1,8 +1,9 @@ from typing import Any, Dict, Optional from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException + +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel -from langflow.interface.custom.custom_component import CustomComponent class ChatLiteLLMComponent(CustomComponent): @@ -81,12 +82,9 @@ class ChatLiteLLMComponent(CustomComponent): "default": 1, }, "max_tokens": { - "display_name": "Max tokens", - "field_type": "int", - "advanced": False, - "required": False, - "default": 256, - "info": "The maximum number of tokens to generate for each chat completion.", + "display_name": "Max Tokens", + "advanced": True, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", }, "max_retries": { "display_name": "Max retries", diff --git a/src/backend/base/langflow/components/model_specs/ChatMistralSpecs.py b/src/backend/base/langflow/components/model_specs/ChatMistralSpecs.py index 90f94aacd..73bbc3220 100644 --- a/src/backend/base/langflow/components/model_specs/ChatMistralSpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatMistralSpecs.py @@ -77,7 +77,7 @@ class MistralAIModelComponent(CustomComponent): output = ChatMistralAI( model_name=model, api_key=(SecretStr(mistral_api_key) if mistral_api_key else None), - max_tokens=max_tokens, + max_tokens=max_tokens or None, temperature=temperature, endpoint=mistral_api_base, ) diff --git a/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py b/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py index 6afde420c..610f4d110 100644 --- a/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py @@ -1,11 +1,11 @@ -from typing import Any, Dict, List, Optional +from typing import Dict, List, Optional # from langchain_community.chat_models import ChatOllama from langchain_community.chat_models import ChatOllama from langchain_core.language_models.chat_models import BaseChatModel # from langchain.chat_models import ChatOllama -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent # from langchain.callbacks.manager import CallbackManager @@ -182,7 +182,7 @@ class ChatOllamaComponent(CustomComponent): num_ctx: Optional[int] = None, num_gpu: Optional[int] = None, format: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None, + metadata: Optional[Dict] = None, num_thread: Optional[int] = None, repeat_penalty: Optional[float] = None, stop: Optional[List[str]] = None, diff --git 
a/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py index 75f893582..ff26d5923 100644 --- a/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py @@ -3,10 +3,9 @@ from typing import Optional from langchain_openai import ChatOpenAI from pydantic.v1 import SecretStr - from langflow.base.models.openai_constants import MODEL_NAMES +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, NestedDict -from langflow.interface.custom.custom_component import CustomComponent class ChatOpenAIComponent(CustomComponent): @@ -18,8 +17,8 @@ class ChatOpenAIComponent(CustomComponent): return { "max_tokens": { "display_name": "Max Tokens", - "advanced": False, - "required": False, + "advanced": True, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", }, "model_kwargs": { "display_name": "Model Kwargs", @@ -52,7 +51,7 @@ class ChatOpenAIComponent(CustomComponent): def build( self, - max_tokens: Optional[int] = 256, + max_tokens: Optional[int] = 0, model_kwargs: NestedDict = {}, model_name: str = "gpt-4o", openai_api_base: Optional[str] = None, @@ -66,7 +65,7 @@ class ChatOpenAIComponent(CustomComponent): else: api_key = None return ChatOpenAI( - max_tokens=max_tokens, + max_tokens=max_tokens or None, model_kwargs=model_kwargs, model=model_name, base_url=openai_api_base, diff --git a/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py index f2c377546..0df7a0465 100644 --- a/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py +++ b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py @@ -1,10 +1,9 @@ -from typing import List, Optional +from typing import Optional from langchain_community.chat_models.vertexai import 
ChatVertexAI -from langchain_core.messages.base import BaseMessage +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel -from langflow.interface.custom.custom_component import CustomComponent class ChatVertexAIComponent(CustomComponent): @@ -65,7 +64,6 @@ class ChatVertexAIComponent(CustomComponent): self, credentials: Optional[str], project: str, - examples: Optional[List[BaseMessage]] = [], location: str = "us-central1", max_output_tokens: int = 128, model_name: str = "chat-bison", @@ -76,7 +74,6 @@ class ChatVertexAIComponent(CustomComponent): ) -> BaseLanguageModel: return ChatVertexAI( credentials=credentials, - examples=examples, location=location, max_output_tokens=max_output_tokens, model_name=model_name, diff --git a/src/backend/base/langflow/components/model_specs/CohereSpecs.py b/src/backend/base/langflow/components/model_specs/CohereSpecs.py index eeda381a4..2e2a1fa7e 100644 --- a/src/backend/base/langflow/components/model_specs/CohereSpecs.py +++ b/src/backend/base/langflow/components/model_specs/CohereSpecs.py @@ -1,7 +1,10 @@ -from langchain_community.llms.cohere import Cohere -from langchain_core.language_models.base import BaseLanguageModel +from typing import Optional -from langflow.interface.custom.custom_component import CustomComponent +from langchain_cohere import ChatCohere +from langchain_core.language_models.base import BaseLanguageModel +from pydantic.v1 import SecretStr + +from langflow.custom import CustomComponent class CohereComponent(CustomComponent): @@ -13,14 +16,22 @@ class CohereComponent(CustomComponent): def build_config(self): return { "cohere_api_key": {"display_name": "Cohere API Key", "type": "password", "password": True}, - "max_tokens": {"display_name": "Max Tokens", "default": 256, "type": "int", "show": True}, + "max_tokens": { + "display_name": "Max Tokens", + "advanced": True, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + }, "temperature": {"display_name": "Temperature", "default": 0.75, "type": "float", "show": True}, } def build( self, cohere_api_key: str, - max_tokens: int = 256, + max_tokens: Optional[int] = 256, temperature: float = 0.75, ) -> BaseLanguageModel: - return Cohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) # type: ignore + if cohere_api_key: + api_key = SecretStr(cohere_api_key) + else: + api_key = None + return ChatCohere(cohere_api_key=api_key, max_tokens=max_tokens or None, temperature=temperature) # type: ignore diff --git a/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py b/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py index 8a3894292..534085938 100644 --- a/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py +++ b/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py @@ -3,8 +3,8 @@ from typing import Optional from langchain_google_genai import ChatGoogleGenerativeAI # type: ignore from pydantic.v1.types import SecretStr +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, RangeSpec -from langflow.interface.custom.custom_component import CustomComponent class GoogleGenerativeAIComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py index c145105ea..4de68365f 100644 --- a/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py +++ b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py @@ -1,9 +1,9 @@ from typing import Optional -from langflow.field_typing import BaseLanguageModel from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import 
CustomComponent +from langflow.field_typing import BaseLanguageModel class HuggingFaceEndpointsComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py index 4ba5502d3..1c416f1b1 100644 --- a/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py +++ b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py @@ -1,9 +1,9 @@ from typing import List, Optional -from langflow.field_typing import BaseLanguageModel from langchain_community.llms.ollama import Ollama -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent +from langflow.field_typing import BaseLanguageModel class OllamaLLM(CustomComponent): diff --git a/src/backend/base/langflow/components/model_specs/VertexAISpecs.py b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py index c9664408d..49b120d35 100644 --- a/src/backend/base/langflow/components/model_specs/VertexAISpecs.py +++ b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py @@ -1,9 +1,9 @@ from typing import Dict, Optional -from langflow.field_typing import BaseLanguageModel from langchain_community.llms.vertexai import VertexAI -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent +from langflow.field_typing import BaseLanguageModel class VertexAIComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/models/AnthropicModel.py b/src/backend/base/langflow/components/models/AnthropicModel.py index 1e5a7aec5..cfe9ed900 100644 --- a/src/backend/base/langflow/components/models/AnthropicModel.py +++ b/src/backend/base/langflow/components/models/AnthropicModel.py @@ -49,9 +49,8 @@ class AnthropicLLM(LCModelComponent): }, "max_tokens": { "display_name": "Max Tokens", - "field_type": "int", "advanced": True, - "value": 256, + "info": "The maximum 
number of tokens to generate. Set to 0 for unlimited tokens.", }, "temperature": { "display_name": "Temperature", diff --git a/src/backend/base/langflow/components/models/AzureOpenAIModel.py b/src/backend/base/langflow/components/models/AzureOpenAIModel.py index a2ee20b28..c296a8fae 100644 --- a/src/backend/base/langflow/components/models/AzureOpenAIModel.py +++ b/src/backend/base/langflow/components/models/AzureOpenAIModel.py @@ -74,9 +74,8 @@ class AzureChatOpenAIComponent(LCModelComponent): }, "max_tokens": { "display_name": "Max Tokens", - "value": 1000, "advanced": True, - "info": "Maximum number of tokens to generate.", + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", }, "code": {"show": False}, "input_value": {"display_name": "Input"}, @@ -117,7 +116,7 @@ class AzureChatOpenAIComponent(LCModelComponent): api_version=api_version, api_key=secret_api_key, temperature=temperature, - max_tokens=max_tokens, + max_tokens=max_tokens or None, ) except Exception as e: raise ValueError("Could not connect to AzureOpenAI API.") from e diff --git a/src/backend/base/langflow/components/models/ChatLiteLLMModel.py b/src/backend/base/langflow/components/models/ChatLiteLLMModel.py index 95574f0a5..054b59d12 100644 --- a/src/backend/base/langflow/components/models/ChatLiteLLMModel.py +++ b/src/backend/base/langflow/components/models/ChatLiteLLMModel.py @@ -93,9 +93,7 @@ class ChatLiteLLMModelComponent(LCModelComponent): }, "max_tokens": { "display_name": "Max tokens", - "field_type": "int", "advanced": False, - "required": False, "default": 256, "info": "The maximum number of tokens to generate for each chat completion.", }, diff --git a/src/backend/base/langflow/components/models/CohereModel.py b/src/backend/base/langflow/components/models/CohereModel.py index 665aacc13..3bd12c095 100644 --- a/src/backend/base/langflow/components/models/CohereModel.py +++ b/src/backend/base/langflow/components/models/CohereModel.py @@ -1,10 +1,10 @@ from 
typing import Optional -from langchain_community.chat_models.cohere import ChatCohere from pydantic.v1 import SecretStr from langflow.field_typing import Text from langflow.base.constants import STREAM_INFO_TEXT from langflow.base.models.model import LCModelComponent +from langchain_cohere import ChatCohere class CohereComponent(LCModelComponent): @@ -34,9 +34,7 @@ class CohereComponent(LCModelComponent): "max_tokens": { "display_name": "Max Tokens", "advanced": True, - "default": 256, - "type": "int", - "show": True, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", }, "temperature": { "display_name": "Temperature", diff --git a/src/backend/base/langflow/components/models/MistralModel.py b/src/backend/base/langflow/components/models/MistralModel.py index b8834b314..305a45e4b 100644 --- a/src/backend/base/langflow/components/models/MistralModel.py +++ b/src/backend/base/langflow/components/models/MistralModel.py @@ -31,6 +31,7 @@ class MistralAIModelComponent(LCModelComponent): "max_tokens": { "display_name": "Max Tokens", "advanced": True, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", }, "model_name": { "display_name": "Model Name", @@ -125,7 +126,7 @@ class MistralAIModelComponent(LCModelComponent): api_key = None chat_model = ChatMistralAI( - max_tokens=max_tokens, + max_tokens=max_tokens or None, model_name=model_name, endpoint=mistral_api_base, api_key=api_key, diff --git a/src/backend/base/langflow/components/models/OllamaModel.py b/src/backend/base/langflow/components/models/OllamaModel.py index 41d75ba3e..f591e4a5c 100644 --- a/src/backend/base/langflow/components/models/OllamaModel.py +++ b/src/backend/base/langflow/components/models/OllamaModel.py @@ -1,17 +1,12 @@ from typing import Any, Dict, List, Optional -# from langchain_community.chat_models import ChatOllama -from langchain_community.chat_models import ChatOllama +import httpx +from langchain_community.chat_models.ollama import ChatOllama from langflow.base.constants import STREAM_INFO_TEXT from langflow.base.models.model import LCModelComponent - -# from langchain.chat_models import ChatOllama from langflow.field_typing import Text -# whe When a callback component is added to Langflow, the comment must be uncommented. -# from langchain.callbacks.manager import CallbackManager - class ChatOllamaComponent(LCModelComponent): display_name = "Ollama" @@ -20,11 +15,13 @@ class ChatOllamaComponent(LCModelComponent): field_order = [ "base_url", + "headers", + "keep_alive_flag", + "keep_alive", + "metadata", "model", "temperature", "cache", - "callback_manager", - "callbacks", "format", "metadata", "mirostat", @@ -54,12 +51,32 @@ class ChatOllamaComponent(LCModelComponent): "base_url": { "display_name": "Base URL", "info": "Endpoint of the Ollama API. 
Defaults to 'http://localhost:11434' if not specified.", + }, + "format": { + "display_name": "Format", + "info": "Specify the format of the output (e.g., json)", "advanced": True, }, + "headers": { + "display_name": "Headers", + "advanced": True, + }, + "keep_alive_flag": { + "display_name": "Unload interval", + "options": ["Keep", "Immediately", "Minute", "Hour", "sec"], + "real_time_refresh": True, + "refresh_button": True, + }, + "keep_alive": { + "display_name": "interval", + "info": "How long the model will stay loaded into memory.", + }, "model": { "display_name": "Model Name", - "value": "llama2", + "options": [], "info": "Refer to https://ollama.ai/library for more models.", + "real_time_refresh": True, + "refresh_button": True, }, "temperature": { "display_name": "Temperature", @@ -67,31 +84,6 @@ class ChatOllamaComponent(LCModelComponent): "value": 0.8, "info": "Controls the creativity of model responses.", }, - "cache": { - "display_name": "Cache", - "field_type": "bool", - "info": "Enable or disable caching.", - "advanced": True, - "value": False, - }, - ### When a callback component is added to Langflow, the comment must be uncommented. 
### - # "callback_manager": { - # "display_name": "Callback Manager", - # "info": "Optional callback manager for additional functionality.", - # "advanced": True, - # }, - # "callbacks": { - # "display_name": "Callbacks", - # "info": "Callbacks to execute during model runtime.", - # "advanced": True, - # }, - ######################################################################################## - "format": { - "display_name": "Format", - "field_type": "str", - "info": "Specify the format of the output (e.g., json).", - "advanced": True, - }, "metadata": { "display_name": "Metadata", "info": "Metadata to add to the run trace.", @@ -101,20 +93,23 @@ class ChatOllamaComponent(LCModelComponent): "display_name": "Mirostat", "options": ["Disabled", "Mirostat", "Mirostat 2.0"], "info": "Enable/disable Mirostat sampling for controlling perplexity.", - "value": "Disabled", - "advanced": True, + "advanced": False, + "real_time_refresh": True, + "refresh_button": True, }, "mirostat_eta": { "display_name": "Mirostat Eta", "field_type": "float", "info": "Learning rate for Mirostat algorithm. (Default: 0.1)", "advanced": True, + "real_time_refresh": True, }, "mirostat_tau": { "display_name": "Mirostat Tau", "field_type": "float", "info": "Controls the balance between coherence and diversity of the output. 
(Default: 5.0)", "advanced": True, + "real_time_refresh": True, }, "num_ctx": { "display_name": "Context Window Size", @@ -211,25 +206,76 @@ class ChatOllamaComponent(LCModelComponent): }, } + def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None): + if field_name == "mirostat": + if field_value == "Disabled": + build_config["mirostat_eta"]["advanced"] = True + build_config["mirostat_tau"]["advanced"] = True + build_config["mirostat_eta"]["value"] = None + build_config["mirostat_tau"]["value"] = None + + else: + build_config["mirostat_eta"]["advanced"] = False + build_config["mirostat_tau"]["advanced"] = False + + if field_value == "Mirostat 2.0": + build_config["mirostat_eta"]["value"] = 0.2 + build_config["mirostat_tau"]["value"] = 10 + else: + build_config["mirostat_eta"]["value"] = 0.1 + build_config["mirostat_tau"]["value"] = 5 + + if field_name == "model": + base_url_dict = build_config.get("base_url", {}) + base_url_load_from_db = base_url_dict.get("load_from_db", False) + base_url_value = base_url_dict.get("value") + if base_url_load_from_db: + base_url_value = self.variables(base_url_value) + elif not base_url_value: + base_url_value = "http://localhost:11434" + build_config["model"]["options"] = self.get_model(base_url_value + "/api/tags") + + if field_name == "keep_alive_flag": + if field_value == "Keep": + build_config["keep_alive"]["value"] = "-1" + build_config["keep_alive"]["advanced"] = True + elif field_value == "Immediately": + build_config["keep_alive"]["value"] = "0" + build_config["keep_alive"]["advanced"] = True + else: + build_config["keep_alive"]["advanced"] = False + + return build_config + + def get_model(self, url: str) -> List[str]: + try: + with httpx.Client() as client: + response = client.get(url) + response.raise_for_status() + data = response.json() + + model_names = [model["name"] for model in data.get("models", [])] + return model_names + except Exception as e: + raise ValueError("Could 
not retrieve models") from e + return [""] + def build( self, base_url: Optional[str], model: str, input_value: Text, - mirostat: Optional[str], + mirostat: Optional[str] = "Disabled", mirostat_eta: Optional[float] = None, mirostat_tau: Optional[float] = None, - ### When a callback component is added to Langflow, the comment must be uncommented.### - # callback_manager: Optional[CallbackManager] = None, - # callbacks: Optional[List[Callbacks]] = None, - ####################################################################################### repeat_last_n: Optional[int] = None, verbose: Optional[bool] = None, - cache: Optional[bool] = None, + keep_alive: Optional[int] = None, + keep_alive_flag: Optional[str] = "Keep", num_ctx: Optional[int] = None, num_gpu: Optional[int] = None, format: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None, + metadata: Optional[Dict] = None, num_thread: Optional[int] = None, repeat_penalty: Optional[float] = None, stop: Optional[List[str]] = None, @@ -247,30 +293,33 @@ class ChatOllamaComponent(LCModelComponent): if not base_url: base_url = "http://localhost:11434" - # Mapping mirostat settings to their corresponding values - mirostat_options = {"Mirostat": 1, "Mirostat 2.0": 2} + if keep_alive_flag == "Minute": + keep_alive_instance = f"{keep_alive}m" + elif keep_alive_flag == "Hour": + keep_alive_instance = f"{keep_alive}h" + elif keep_alive_flag == "sec": + keep_alive_instance = f"{keep_alive}s" + elif keep_alive_flag == "Keep": + keep_alive_instance = "-1" + elif keep_alive_flag == "Immediately": + keep_alive_instance = "0" + else: + keep_alive_instance = "Invalid option" - # Default to 0 for 'Disabled' - mirostat_value = mirostat_options.get(mirostat, 0) # type: ignore + mirostat_instance = 0 - # Set mirostat_eta and mirostat_tau to None if mirostat is disabled - if mirostat_value == 0: - mirostat_eta = None - mirostat_tau = None + if mirostat == "disable": + mirostat_instance = 0 # Mapping system settings to their 
corresponding values llm_params = { "base_url": base_url, - "cache": cache, "model": model, - "mirostat": mirostat_value, + "mirostat": mirostat_instance, + "keep_alive": keep_alive_instance, "format": format, "metadata": metadata, "tags": tags, - ## When a callback component is added to Langflow, the comment must be uncommented.## - # "callback_manager": callback_manager, - # "callbacks": callbacks, - ##################################################################################### "mirostat_eta": mirostat_eta, "mirostat_tau": mirostat_tau, "num_ctx": num_ctx, diff --git a/src/backend/base/langflow/components/models/OpenAIModel.py b/src/backend/base/langflow/components/models/OpenAIModel.py index 149c93ad3..7adaf7a92 100644 --- a/src/backend/base/langflow/components/models/OpenAIModel.py +++ b/src/backend/base/langflow/components/models/OpenAIModel.py @@ -32,6 +32,7 @@ class OpenAIModelComponent(LCModelComponent): "max_tokens": { "display_name": "Max Tokens", "advanced": True, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", }, "model_kwargs": { "display_name": "Model Kwargs", @@ -93,7 +94,7 @@ class OpenAIModelComponent(LCModelComponent): api_key = None output = ChatOpenAI( - max_tokens=max_tokens, + max_tokens=max_tokens or None, model_kwargs=model_kwargs, model=model_name, base_url=openai_api_base, diff --git a/src/backend/base/langflow/components/models/VertexAiModel.py b/src/backend/base/langflow/components/models/VertexAiModel.py index ff520e0d1..a992447f4 100644 --- a/src/backend/base/langflow/components/models/VertexAiModel.py +++ b/src/backend/base/langflow/components/models/VertexAiModel.py @@ -1,6 +1,5 @@ -from typing import List, Optional +from typing import Optional -from langchain_core.messages.base import BaseMessage from langflow.base.constants import STREAM_INFO_TEXT from langflow.base.models.model import LCModelComponent @@ -93,7 +92,6 @@ class ChatVertexAIComponent(LCModelComponent): input_value: Text, credentials: Optional[str], project: str, - examples: Optional[List[BaseMessage]] = [], location: str = "us-central1", max_output_tokens: int = 128, model_name: str = "chat-bison", @@ -112,7 +110,6 @@ class ChatVertexAIComponent(LCModelComponent): ) output = ChatVertexAI( credentials=credentials, - examples=examples, location=location, max_output_tokens=max_output_tokens, model_name=model_name, diff --git a/src/backend/base/langflow/components/retrievers/AmazonKendra.py b/src/backend/base/langflow/components/retrievers/AmazonKendra.py index 436f69d0f..23ab9191a 100644 --- a/src/backend/base/langflow/components/retrievers/AmazonKendra.py +++ b/src/backend/base/langflow/components/retrievers/AmazonKendra.py @@ -1,9 +1,10 @@ from typing import Optional -from langchain_community.retrievers import AmazonKendraRetriever -from langflow.interface.custom.custom_component import CustomComponent +from langchain_community.retrievers import AmazonKendraRetriever from langchain_core.retrievers import BaseRetriever +from langflow.custom import 
CustomComponent + class AmazonKendraRetrieverComponent(CustomComponent): display_name: str = "Amazon Kendra Retriever" diff --git a/src/backend/base/langflow/components/retrievers/MetalRetriever.py b/src/backend/base/langflow/components/retrievers/MetalRetriever.py index 4f1e71dd1..55fbcff0d 100644 --- a/src/backend/base/langflow/components/retrievers/MetalRetriever.py +++ b/src/backend/base/langflow/components/retrievers/MetalRetriever.py @@ -1,9 +1,10 @@ from typing import Optional + from langchain_community.retrievers import MetalRetriever +from langchain_core.retrievers import BaseRetriever from metal_sdk.metal import Metal # type: ignore -from langflow.interface.custom.custom_component import CustomComponent -from langchain_core.retrievers import BaseRetriever +from langflow.custom import CustomComponent class MetalRetrieverComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py index 8dd6d8579..d9197ece2 100644 --- a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py +++ b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py @@ -2,8 +2,8 @@ from typing import Optional from langchain.retrievers import MultiQueryRetriever -from langflow.field_typing import BaseRetriever, PromptTemplate, BaseLanguageModel -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent +from langflow.field_typing import BaseLanguageModel, BaseRetriever, PromptTemplate, Text class MultiQueryRetrieverComponent(CustomComponent): @@ -41,10 +41,13 @@ class MultiQueryRetrieverComponent(CustomComponent): self, llm: BaseLanguageModel, retriever: BaseRetriever, - prompt: Optional[PromptTemplate] = None, + prompt: Optional[Text] = None, parser_key: str = "lines", ) -> MultiQueryRetriever: if not prompt: return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, 
parser_key=parser_key) else: - return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, prompt=prompt, parser_key=parser_key) + prompt_template = PromptTemplate.from_template(prompt) + return MultiQueryRetriever.from_llm( + llm=llm, retriever=retriever, prompt=prompt_template, parser_key=parser_key + ) diff --git a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py index 0f5db6383..0c5c4fff5 100644 --- a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py +++ b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py @@ -1,13 +1,14 @@ import json from typing import List + from langchain.chains.query_constructor.base import AttributeInfo from langchain.retrievers.self_query.base import SelfQueryRetriever - -from langflow.interface.custom.custom_component import CustomComponent from langchain_core.language_models import BaseLanguageModel from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent + class VectaraSelfQueryRetriverComponent(CustomComponent): """ diff --git a/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py b/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py index 43f1aab71..6460e0458 100644 --- a/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py +++ b/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py @@ -1,7 +1,7 @@ from langchain_core.vectorstores import VectorStoreRetriever +from langflow.custom import CustomComponent from langflow.field_typing import VectorStore -from langflow.interface.custom.custom_component import CustomComponent class VectoStoreRetrieverComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py 
b/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py index 8c23720f2..ee340ab26 100644 --- a/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py +++ b/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py @@ -1,9 +1,10 @@ from typing import List -from langflow.interface.custom.custom_component import CustomComponent +from langchain_text_splitters import CharacterTextSplitter + +from langflow.custom import CustomComponent from langflow.schema.schema import Record from langflow.utils.util import unescape_string -from langchain_text_splitters import CharacterTextSplitter class CharacterTextSplitterComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py b/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py index 19dc94686..7ef7d5c24 100644 --- a/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py +++ b/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py @@ -1,9 +1,10 @@ from typing import List, Optional -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema.schema import Record from langchain_text_splitters import Language, RecursiveCharacterTextSplitter +from langflow.custom import CustomComponent +from langflow.schema.schema import Record + class LanguageRecursiveTextSplitterComponent(CustomComponent): display_name: str = "Language Recursive Text Splitter" diff --git a/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py b/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py index 2bcde2232..77fcfa62a 100644 --- a/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py +++ b/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py @@ -1,10 +1,11 @@ from typing import Optional 
-from langchain_core.documents import Document -from langflow.interface.custom.custom_component import CustomComponent +from langchain_core.documents import Document +from langchain_text_splitters import RecursiveCharacterTextSplitter + +from langflow.custom import CustomComponent from langflow.schema import Record from langflow.utils.util import build_loader_repr_from_records, unescape_string -from langchain_text_splitters import RecursiveCharacterTextSplitter class RecursiveCharacterTextSplitterComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/toolkits/JsonToolkit.py b/src/backend/base/langflow/components/toolkits/JsonToolkit.py index 72fe17cde..09a613336 100644 --- a/src/backend/base/langflow/components/toolkits/JsonToolkit.py +++ b/src/backend/base/langflow/components/toolkits/JsonToolkit.py @@ -1,7 +1,10 @@ +from pathlib import Path + +import yaml from langchain_community.agent_toolkits.json.toolkit import JsonToolkit from langchain_community.tools.json.tool import JsonSpec -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class JsonToolkitComponent(CustomComponent): @@ -10,8 +13,17 @@ class JsonToolkitComponent(CustomComponent): def build_config(self): return { - "spec": {"display_name": "Spec", "type": JsonSpec}, + "path": { + "display_name": "Path", + "field_type": "file", + "file_types": ["json", "yaml", "yml"], + }, } - def build(self, spec: JsonSpec) -> JsonToolkit: + def build(self, path: str) -> JsonToolkit: + if path.endswith("yaml") or path.endswith("yml"): + yaml_dict = yaml.load(open(path, "r"), Loader=yaml.FullLoader) + spec = JsonSpec(dict_=yaml_dict) + else: + spec = JsonSpec.from_file(Path(path)) return JsonToolkit(spec=spec) diff --git a/src/backend/base/langflow/components/toolkits/Metaphor.py b/src/backend/base/langflow/components/toolkits/Metaphor.py index ba63416fb..9ebd4f771 100644 --- a/src/backend/base/langflow/components/toolkits/Metaphor.py 
+++ b/src/backend/base/langflow/components/toolkits/Metaphor.py @@ -1,9 +1,10 @@ from typing import List, Union -from metaphor_python import Metaphor # type: ignore -from langflow.interface.custom.custom_component import CustomComponent from langchain_community.agent_toolkits.base import BaseToolkit from langchain_core.tools import Tool, tool +from metaphor_python import Metaphor # type: ignore + +from langflow.custom import CustomComponent class MetaphorToolkit(CustomComponent): diff --git a/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py b/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py index b29feb291..a24798cef 100644 --- a/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py +++ b/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py @@ -1,8 +1,12 @@ +from pathlib import Path + +import yaml from langchain_community.agent_toolkits.openapi.toolkit import BaseToolkit, OpenAPIToolkit +from langchain_community.tools.json.tool import JsonSpec from langchain_community.utilities.requests import TextRequestsWrapper -from langflow.field_typing import AgentExecutor -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent +from langflow.field_typing import BaseLanguageModel class OpenAPIToolkitComponent(CustomComponent): @@ -15,9 +19,16 @@ class OpenAPIToolkitComponent(CustomComponent): "requests_wrapper": {"display_name": "Text Requests Wrapper"}, } - def build( - self, - json_agent: AgentExecutor, - requests_wrapper: TextRequestsWrapper, - ) -> BaseToolkit: - return OpenAPIToolkit(json_agent=json_agent, requests_wrapper=requests_wrapper) + def build(self, llm: BaseLanguageModel, path: str, allow_dangerous_requests: bool = False) -> BaseToolkit: + if path.endswith("yaml") or path.endswith("yml"): + yaml_dict = yaml.load(open(path, "r"), Loader=yaml.FullLoader) + spec = JsonSpec(dict_=yaml_dict) + else: + spec = JsonSpec.from_file(Path(path)) + requests_wrapper 
= TextRequestsWrapper() + return OpenAPIToolkit.from_llm( + llm=llm, + json_spec=spec, + requests_wrapper=requests_wrapper, + allow_dangerous_requests=allow_dangerous_requests, + ) diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py index 78f00dc40..60bd6598e 100644 --- a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py +++ b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py @@ -1,8 +1,8 @@ from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo - -from langflow.interface.custom.custom_component import CustomComponent from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent + class VectorStoreInfoComponent(CustomComponent): display_name = "VectorStoreInfo" diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py b/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py index 13ec1656f..13fff14a2 100644 --- a/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py +++ b/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py @@ -2,8 +2,8 @@ from typing import List, Union from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo, VectorStoreRouterToolkit +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, Tool -from langflow.interface.custom.custom_component import CustomComponent class VectorStoreRouterToolkitComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py b/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py index 8436ba58e..2f788fcb9 100644 --- a/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py +++ b/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py @@ -2,8 +2,8 @@ from typing import Union from 
langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo, VectorStoreToolkit +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel, Tool -from langflow.interface.custom.custom_component import CustomComponent class VectorStoreToolkitComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/tools/RetrieverTool.py b/src/backend/base/langflow/components/tools/RetrieverTool.py index 914ba3941..28829321e 100644 --- a/src/backend/base/langflow/components/tools/RetrieverTool.py +++ b/src/backend/base/langflow/components/tools/RetrieverTool.py @@ -1,7 +1,7 @@ from langchain.tools.retriever import create_retriever_tool +from langflow.custom import CustomComponent from langflow.field_typing import BaseRetriever, Tool -from langflow.interface.custom.custom_component import CustomComponent class RetrieverToolComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py b/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py index 3e4b7880d..228e100e4 100644 --- a/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py +++ b/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py @@ -1,7 +1,8 @@ from typing import List, Optional -import chromadb # type: ignore -from langchain_community.vectorstores.chroma import Chroma +import chromadb +from chromadb.config import Settings +from langchain_chroma import Chroma from langflow.components.vectorstores.base.model import LCVectorStoreComponent from langflow.field_typing import Embeddings, Text @@ -39,7 +40,7 @@ class ChromaSearchComponent(LCVectorStoreComponent): "advanced": True, }, "chroma_server_host": {"display_name": "Server Host", "advanced": True}, - "chroma_server_port": {"display_name": "Server Port", "advanced": True}, + "chroma_server_http_port": {"display_name": "Server HTTP Port", "advanced": True}, "chroma_server_grpc_port": { "display_name": "Server gRPC Port", 
"advanced": True, @@ -64,48 +65,50 @@ class ChromaSearchComponent(LCVectorStoreComponent): chroma_server_ssl_enabled: bool, number_of_results: int = 4, index_directory: Optional[str] = None, - chroma_server_cors_allow_origins: Optional[str] = None, + chroma_server_cors_allow_origins: List[str] = [], chroma_server_host: Optional[str] = None, - chroma_server_port: Optional[int] = None, + chroma_server_http_port: Optional[int] = None, chroma_server_grpc_port: Optional[int] = None, ) -> List[Record]: """ Builds the Vector Store or BaseRetriever object. Args: + - input_value (Text): The input value. + - search_type (str): The type of search. - collection_name (str): The name of the collection. - - persist_directory (Optional[str]): The directory to persist the Vector Store to. + - embedding (Embeddings): The embeddings to use for the Vector Store. - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server. - - persist (bool): Whether to persist the Vector Store or not. - - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store. - - documents (Optional[Document]): The documents to use for the Vector Store. - - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server. - - chroma_server_host (Optional[str]): The host for the Chroma server. - - chroma_server_port (Optional[int]): The port for the Chroma server. - - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server. + - number_of_results (int, optional): The number of results to retrieve. Defaults to 4. + - index_directory (str, optional): The directory to persist the Vector Store to. Defaults to None. + - chroma_server_cors_allow_origins (List[str], optional): The CORS allow origins for the Chroma server. Defaults to []. + - chroma_server_host (str, optional): The host for the Chroma server. Defaults to None. + - chroma_server_http_port (int, optional): The HTTP port for the Chroma server. Defaults to None. 
+ - chroma_server_grpc_port (int, optional): The gRPC port for the Chroma server. Defaults to None. Returns: - - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object. + - List[Record]: The list of records. """ # Chroma settings chroma_settings = None - + client = None if chroma_server_host is not None: - chroma_settings = chromadb.config.Settings( - chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None, + chroma_settings = Settings( + chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or [], chroma_server_host=chroma_server_host, - chroma_server_port=chroma_server_port or None, + chroma_server_http_port=chroma_server_http_port or None, chroma_server_grpc_port=chroma_server_grpc_port or None, chroma_server_ssl_enabled=chroma_server_ssl_enabled, ) + client = chromadb.HttpClient(settings=chroma_settings) if index_directory: index_directory = self.resolve_path(index_directory) vector_store = Chroma( embedding_function=embedding, collection_name=collection_name, persist_directory=index_directory, - client_settings=chroma_settings, + client=client, ) return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results) diff --git a/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py b/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py index d4818f354..e995f86f8 100644 --- a/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py +++ b/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py @@ -26,7 +26,7 @@ class PineconeSearchComponent(PineconeComponent, LCVectorStoreComponent): "input_value": {"display_name": "Input"}, "embedding": {"display_name": "Embedding"}, "index_name": {"display_name": "Index Name"}, - "namespace": {"display_name": "Namespace", "advanced": True}, + "namespace": {"display_name": "Namespace", "info": "Namespace for the index."}, "distance_strategy": { "display_name": "Distance Strategy", # get values from 
enum diff --git a/src/backend/base/langflow/components/vectorsearch/UpstashSearch.py b/src/backend/base/langflow/components/vectorsearch/UpstashSearch.py new file mode 100644 index 000000000..506896e2b --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/UpstashSearch.py @@ -0,0 +1,79 @@ +from typing import List, Optional + +from langchain_core.embeddings import Embeddings + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.components.vectorstores.Upstash import UpstashVectorStoreComponent +from langflow.field_typing import Text +from langflow.schema import Record + + +class UpstashSearchComponent(UpstashVectorStoreComponent, LCVectorStoreComponent): + """ + A custom component for implementing a Vector Store using Upstash. + """ + + display_name: str = "Upstash Search" + description: str = "Search an Upstash Vector Store for similar documents." + + def build_config(self): + """ + Builds the configuration for the component. + + Returns: + - dict: A dictionary containing the configuration options for the component. 
+ """ + return { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": {"display_name": "Input"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, + "embedding": { + "display_name": "Embedding", + "input_types": ["Embeddings"], + "info": "To use Upstash's embeddings, don't provide an embedding.", + }, + "index_url": { + "display_name": "Index URL", + "info": "The URL of the Upstash index.", + }, + "index_token": { + "display_name": "Index Token", + "info": "The token for the Upstash index.", + }, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + "text_key": { + "display_name": "Text Key", + "info": "The key in the record to use as text.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + search_type: str, + text_key: str = "text", + index_url: Optional[str] = None, + index_token: Optional[str] = None, + embedding: Optional[Embeddings] = None, + number_of_results: int = 4, + ) -> List[Record]: + vector_store = super().build( + embedding=embedding, + text_key=text_key, + index_url=index_url, + index_token=index_token, + ) + if not vector_store: + raise ValueError("Failed to load the Upstash Vector Store.") + + return self.search_with_vector_store( + input_value=input_value, search_type=search_type, vector_store=vector_store, k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorsearch/__init__.py b/src/backend/base/langflow/components/vectorsearch/__init__.py index 4cdf5b83c..83ce34b26 100644 --- a/src/backend/base/langflow/components/vectorsearch/__init__.py +++ b/src/backend/base/langflow/components/vectorsearch/__init__.py @@ -9,7 +9,7 @@ from .SupabaseVectorStoreSearch import SupabaseSearchComponent from .VectaraSearch import VectaraSearchComponent from .WeaviateSearch import WeaviateSearchVectorStore from .pgvectorSearch 
import PGVectorSearchComponent -from .Couchbase import CouchbaseSearchComponent # type: ignore +from .Couchbase import CouchbaseSearchComponent # type: ignore __all__ = [ "AstraDBSearchComponent", diff --git a/src/backend/base/langflow/components/vectorstores/Chroma.py b/src/backend/base/langflow/components/vectorstores/Chroma.py index 8ea943a61..3671dbbdb 100644 --- a/src/backend/base/langflow/components/vectorstores/Chroma.py +++ b/src/backend/base/langflow/components/vectorstores/Chroma.py @@ -1,14 +1,15 @@ from typing import List, Optional, Union -import chromadb # type: ignore -from langchain_community.vectorstores.chroma import Chroma - -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema.schema import Record +import chromadb +from chromadb.config import Settings +from langchain_chroma import Chroma from langchain_core.embeddings import Embeddings from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent +from langflow.schema.schema import Record + class ChromaComponent(CustomComponent): """ @@ -38,7 +39,7 @@ class ChromaComponent(CustomComponent): "advanced": True, }, "chroma_server_host": {"display_name": "Server Host", "advanced": True}, - "chroma_server_port": {"display_name": "Server Port", "advanced": True}, + "chroma_server_http_port": {"display_name": "Server HTTP Port", "advanced": True}, "chroma_server_grpc_port": { "display_name": "Server gRPC Port", "advanced": True, @@ -56,9 +57,9 @@ class ChromaComponent(CustomComponent): chroma_server_ssl_enabled: bool, index_directory: Optional[str] = None, inputs: Optional[List[Record]] = None, - chroma_server_cors_allow_origins: Optional[str] = None, + chroma_server_cors_allow_origins: List[str] = [], chroma_server_host: Optional[str] = None, - chroma_server_port: Optional[int] = None, + chroma_server_http_port: Optional[int] = None, chroma_server_grpc_port: Optional[int] = 
None, ) -> Union[VectorStore, BaseRetriever]: """ @@ -66,13 +67,13 @@ class ChromaComponent(CustomComponent): Args: - collection_name (str): The name of the collection. - - index_directory (Optional[str]): The directory to persist the Vector Store to. + - embedding (Embeddings): The embeddings to use for the Vector Store. - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server. - - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store. - - documents (Optional[Document]): The documents to use for the Vector Store. - - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server. + - index_directory (Optional[str]): The directory to persist the Vector Store to. + - inputs (Optional[List[Record]]): The input records to use for the Vector Store. + - chroma_server_cors_allow_origins (List[str]): The CORS allow origins for the Chroma server. - chroma_server_host (Optional[str]): The host for the Chroma server. - - chroma_server_port (Optional[int]): The port for the Chroma server. + - chroma_server_http_port (Optional[int]): The HTTP port for the Chroma server. - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server. 
Returns: @@ -81,15 +82,16 @@ class ChromaComponent(CustomComponent): # Chroma settings chroma_settings = None - + client = None if chroma_server_host is not None: - chroma_settings = chromadb.config.Settings( - chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None, + chroma_settings = Settings( + chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or [], chroma_server_host=chroma_server_host, - chroma_server_port=chroma_server_port or None, + chroma_server_http_port=chroma_server_http_port or None, chroma_server_grpc_port=chroma_server_grpc_port or None, chroma_server_ssl_enabled=chroma_server_ssl_enabled, ) + client = chromadb.HttpClient(settings=chroma_settings) # If documents, then we need to create a Chroma instance using .from_documents @@ -111,12 +113,12 @@ class ChromaComponent(CustomComponent): persist_directory=index_directory, collection_name=collection_name, embedding=embedding, - client_settings=chroma_settings, + client=client, ) else: chroma = Chroma( persist_directory=index_directory, - client_settings=chroma_settings, + client=client, embedding_function=embedding, ) return chroma diff --git a/src/backend/base/langflow/components/vectorstores/FAISS.py b/src/backend/base/langflow/components/vectorstores/FAISS.py index 410ac6a87..9d9624919 100644 --- a/src/backend/base/langflow/components/vectorstores/FAISS.py +++ b/src/backend/base/langflow/components/vectorstores/FAISS.py @@ -1,12 +1,13 @@ from typing import List, Text, Union -from langchain_community.vectorstores.faiss import FAISS -from langflow.field_typing import Embeddings -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema.schema import Record +from langchain_community.vectorstores.faiss import FAISS from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent +from langflow.field_typing import Embeddings +from langflow.schema.schema 
import Record + class FAISSComponent(CustomComponent): display_name = "FAISS" diff --git a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py index 6c800957a..8c045a1bd 100644 --- a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py +++ b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py @@ -1,8 +1,9 @@ from typing import List, Optional from langchain_community.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch + +from langflow.custom import CustomComponent from langflow.field_typing import Embeddings -from langflow.interface.custom.custom_component import CustomComponent from langflow.schema.schema import Record diff --git a/src/backend/base/langflow/components/vectorstores/Pinecone.py b/src/backend/base/langflow/components/vectorstores/Pinecone.py index 31521dc10..2bc0e2252 100644 --- a/src/backend/base/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/base/langflow/components/vectorstores/Pinecone.py @@ -1,13 +1,14 @@ from typing import List, Optional, Union + from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever +from langchain_core.vectorstores import VectorStore from langchain_pinecone._utilities import DistanceStrategy from langchain_pinecone.vectorstores import PineconeVectorStore +from langflow.custom import CustomComponent from langflow.field_typing import Embeddings -from langflow.interface.custom.custom_component import CustomComponent from langflow.schema.schema import Record -from langchain_core.retrievers import BaseRetriever -from langchain_core.vectorstores import VectorStore class PineconeComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/vectorstores/Qdrant.py b/src/backend/base/langflow/components/vectorstores/Qdrant.py index 200d22770..794e282db 100644 --- a/src/backend/base/langflow/components/vectorstores/Qdrant.py 
+++ b/src/backend/base/langflow/components/vectorstores/Qdrant.py @@ -1,12 +1,13 @@ from typing import Optional, Union -from langchain_community.vectorstores.qdrant import Qdrant -from langflow.field_typing import Embeddings -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema.schema import Record +from langchain_community.vectorstores.qdrant import Qdrant from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent +from langflow.field_typing import Embeddings +from langflow.schema.schema import Record + class QdrantComponent(CustomComponent): display_name = "Qdrant" @@ -66,22 +67,19 @@ class QdrantComponent(CustomComponent): documents.append(_input.to_lc_document()) else: documents.append(_input) - if documents is None: + if not documents: from qdrant_client import QdrantClient client = QdrantClient( location=location, - url=host, + url=url, port=port, grpc_port=grpc_port, https=https, prefix=prefix, timeout=timeout, prefer_grpc=prefer_grpc, - metadata_payload_key=metadata_payload_key, - content_payload_key=content_payload_key, api_key=api_key, - collection_name=collection_name, host=host, path=path, ) @@ -89,6 +87,8 @@ class QdrantComponent(CustomComponent): client=client, collection_name=collection_name, embeddings=embedding, + content_payload_key=content_payload_key, + metadata_payload_key=metadata_payload_key, ) return vs else: diff --git a/src/backend/base/langflow/components/vectorstores/Redis.py b/src/backend/base/langflow/components/vectorstores/Redis.py index c72c11f4d..04d137538 100644 --- a/src/backend/base/langflow/components/vectorstores/Redis.py +++ b/src/backend/base/langflow/components/vectorstores/Redis.py @@ -1,12 +1,13 @@ from typing import Optional, Union -from langchain_community.vectorstores.redis import Redis -from langchain_core.retrievers import BaseRetriever -from langflow.interface.custom.custom_component 
import CustomComponent -from langflow.schema.schema import Record +from langchain_community.vectorstores.redis import Redis from langchain_core.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent +from langflow.schema.schema import Record + class RedisComponent(CustomComponent): """ diff --git a/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py index 71bf78ec8..5e87a09ca 100644 --- a/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py +++ b/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py @@ -1,12 +1,13 @@ from typing import List, Optional, Union -from langchain_community.vectorstores.supabase import SupabaseVectorStore -from supabase.client import Client, create_client -from langflow.field_typing import Embeddings -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema.schema import Record +from langchain_community.vectorstores.supabase import SupabaseVectorStore from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from supabase.client import Client, create_client + +from langflow.custom import CustomComponent +from langflow.field_typing import Embeddings +from langflow.schema.schema import Record class SupabaseComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/vectorstores/Upstash.py b/src/backend/base/langflow/components/vectorstores/Upstash.py new file mode 100644 index 000000000..c066d7f44 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/Upstash.py @@ -0,0 +1,89 @@ +from typing import List, Optional, Union + +from langchain_community.vectorstores.upstash import UpstashVectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.retrievers 
import BaseRetriever +from langchain_core.vectorstores import VectorStore + +from langflow.custom import CustomComponent +from langflow.schema.schema import Record + + +class UpstashVectorStoreComponent(CustomComponent): + """ + A custom component for implementing a Vector Store using Upstash. + """ + + display_name: str = "Upstash" + description: str = "Create and Utilize an Upstash Vector Store" + + def build_config(self): + """ + Builds the configuration for the component. + + Returns: + - dict: A dictionary containing the configuration options for the component. + """ + return { + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, + "embedding": { + "display_name": "Embedding", + "input_types": ["Embeddings"], + "info": "To use Upstash's embeddings, don't provide an embedding.", + }, + "index_url": { + "display_name": "Index URL", + "info": "The URL of the Upstash index.", + }, + "index_token": { + "display_name": "Index Token", + "info": "The token for the Upstash index.", + }, + "text_key": { + "display_name": "Text Key", + "info": "The key in the record to use as text.", + "advanced": True, + }, + } + + def build( + self, + inputs: Optional[List[Record]] = None, + text_key: str = "text", + index_url: Optional[str] = None, + index_token: Optional[str] = None, + embedding: Optional[Embeddings] = None, + ) -> Union[VectorStore, BaseRetriever]: + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + use_upstash_embedding = embedding is None + if not documents: + upstash_vs = UpstashVectorStore( + embedding=embedding or use_upstash_embedding, + text_key=text_key, + index_url=index_url, + index_token=index_token, + ) + else: + if use_upstash_embedding: + upstash_vs = UpstashVectorStore( + embedding=use_upstash_embedding, + text_key=text_key, + index_url=index_url, + index_token=index_token, + ) + upstash_vs.add_documents(documents) + 
elif embedding: + upstash_vs = UpstashVectorStore.from_documents( + documents=documents, # type: ignore + embedding=embedding, + text_key=text_key, + index_url=index_url, + index_token=index_token, + ) + return upstash_vs diff --git a/src/backend/base/langflow/components/vectorstores/Vectara.py b/src/backend/base/langflow/components/vectorstores/Vectara.py index 5c087875f..247614345 100644 --- a/src/backend/base/langflow/components/vectorstores/Vectara.py +++ b/src/backend/base/langflow/components/vectorstores/Vectara.py @@ -7,8 +7,8 @@ from langchain_community.embeddings import FakeEmbeddings from langchain_community.vectorstores.vectara import Vectara from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent from langflow.field_typing import BaseRetriever -from langflow.interface.custom.custom_component import CustomComponent from langflow.schema.schema import Record diff --git a/src/backend/base/langflow/components/vectorstores/Weaviate.py b/src/backend/base/langflow/components/vectorstores/Weaviate.py index 108c5a5da..e1a802000 100644 --- a/src/backend/base/langflow/components/vectorstores/Weaviate.py +++ b/src/backend/base/langflow/components/vectorstores/Weaviate.py @@ -3,13 +3,13 @@ from typing import Optional, Union import weaviate # type: ignore from langchain_community.vectorstores import Weaviate from langchain_core.documents import Document - -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema.schema import Record from langchain_core.embeddings import Embeddings from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent +from langflow.schema.schema import Record + class WeaviateVectorStoreComponent(CustomComponent): display_name: str = "Weaviate" diff --git a/src/backend/base/langflow/components/vectorstores/base/model.py 
b/src/backend/base/langflow/components/vectorstores/base/model.py index 668c5eff2..18a37c9cf 100644 --- a/src/backend/base/langflow/components/vectorstores/base/model.py +++ b/src/backend/base/langflow/components/vectorstores/base/model.py @@ -4,9 +4,9 @@ from langchain_core.documents import Document from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent from langflow.field_typing import Text from langflow.helpers.record import docs_to_records -from langflow.interface.custom.custom_component import CustomComponent from langflow.schema import Record diff --git a/src/backend/base/langflow/components/vectorstores/pgvector.py b/src/backend/base/langflow/components/vectorstores/pgvector.py index 1c46d1e51..75c833ded 100644 --- a/src/backend/base/langflow/components/vectorstores/pgvector.py +++ b/src/backend/base/langflow/components/vectorstores/pgvector.py @@ -1,12 +1,13 @@ from typing import Optional, Union -from langchain_community.vectorstores.pgvector import PGVector -from langchain_core.retrievers import BaseRetriever -from langflow.interface.custom.custom_component import CustomComponent -from langflow.schema.schema import Record +from langchain_community.vectorstores.pgvector import PGVector from langchain_core.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever from langchain_core.vectorstores import VectorStore +from langflow.custom import CustomComponent +from langflow.schema.schema import Record + class PGVectorComponent(CustomComponent): """ diff --git a/src/backend/base/langflow/custom.py b/src/backend/base/langflow/custom.py deleted file mode 100644 index ebe4c04e8..000000000 --- a/src/backend/base/langflow/custom.py +++ /dev/null @@ -1 +0,0 @@ -from langflow.interface.custom.custom_component import CustomComponent # noqa: F401 diff --git a/src/backend/base/langflow/custom/__init__.py b/src/backend/base/langflow/custom/__init__.py new 
file mode 100644 index 000000000..bd789498a --- /dev/null +++ b/src/backend/base/langflow/custom/__init__.py @@ -0,0 +1,3 @@ +from langflow.custom.custom_component import CustomComponent + +__all__ = ["CustomComponent"] diff --git a/src/backend/base/langflow/interface/custom/attributes.py b/src/backend/base/langflow/custom/attributes.py similarity index 100% rename from src/backend/base/langflow/interface/custom/attributes.py rename to src/backend/base/langflow/custom/attributes.py diff --git a/src/backend/base/langflow/interface/custom/code_parser/__init__.py b/src/backend/base/langflow/custom/code_parser/__init__.py similarity index 100% rename from src/backend/base/langflow/interface/custom/code_parser/__init__.py rename to src/backend/base/langflow/custom/code_parser/__init__.py diff --git a/src/backend/base/langflow/interface/custom/code_parser/code_parser.py b/src/backend/base/langflow/custom/code_parser/code_parser.py similarity index 98% rename from src/backend/base/langflow/interface/custom/code_parser/code_parser.py rename to src/backend/base/langflow/custom/code_parser/code_parser.py index 44dbbc1d9..17fe12896 100644 --- a/src/backend/base/langflow/interface/custom/code_parser/code_parser.py +++ b/src/backend/base/langflow/custom/code_parser/code_parser.py @@ -8,8 +8,8 @@ from cachetools import TTLCache, cachedmethod, keys from fastapi import HTTPException from loguru import logger -from langflow.interface.custom.eval import eval_custom_component_code -from langflow.interface.custom.schema import CallableCodeDetails, ClassCodeDetails, MissingDefault +from langflow.custom.eval import eval_custom_component_code +from langflow.custom.schema import CallableCodeDetails, ClassCodeDetails, MissingDefault class CodeSyntaxError(HTTPException): diff --git a/src/backend/base/langflow/interface/custom/code_parser/utils.py b/src/backend/base/langflow/custom/code_parser/utils.py similarity index 100% rename from 
src/backend/base/langflow/interface/custom/code_parser/utils.py rename to src/backend/base/langflow/custom/code_parser/utils.py diff --git a/src/backend/base/langflow/interface/custom/custom_component/__init__.py b/src/backend/base/langflow/custom/custom_component/__init__.py similarity index 100% rename from src/backend/base/langflow/interface/custom/custom_component/__init__.py rename to src/backend/base/langflow/custom/custom_component/__init__.py diff --git a/src/backend/base/langflow/interface/custom/custom_component/component.py b/src/backend/base/langflow/custom/custom_component/component.py similarity index 93% rename from src/backend/base/langflow/interface/custom/custom_component/component.py rename to src/backend/base/langflow/custom/custom_component/component.py index 470ebcde8..d45b5daed 100644 --- a/src/backend/base/langflow/interface/custom/custom_component/component.py +++ b/src/backend/base/langflow/custom/custom_component/component.py @@ -5,9 +5,9 @@ from typing import Any, ClassVar, Optional from cachetools import TTLCache, cachedmethod from fastapi import HTTPException -from langflow.interface.custom.attributes import ATTR_FUNC_MAPPING -from langflow.interface.custom.code_parser import CodeParser -from langflow.interface.custom.eval import eval_custom_component_code +from langflow.custom.attributes import ATTR_FUNC_MAPPING +from langflow.custom.code_parser import CodeParser +from langflow.custom.eval import eval_custom_component_code from langflow.utils import validate diff --git a/src/backend/base/langflow/interface/custom/custom_component/custom_component.py b/src/backend/base/langflow/custom/custom_component/custom_component.py similarity index 99% rename from src/backend/base/langflow/interface/custom/custom_component/custom_component.py rename to src/backend/base/langflow/custom/custom_component/custom_component.py index 1638ebb2c..aeac9cae6 100644 --- a/src/backend/base/langflow/interface/custom/custom_component/custom_component.py +++ 
b/src/backend/base/langflow/custom/custom_component/custom_component.py @@ -7,13 +7,12 @@ import yaml from cachetools import TTLCache, cachedmethod from langchain_core.documents import Document from pydantic import BaseModel - -from langflow.helpers.flow import list_flows, load_flow, run_flow -from langflow.interface.custom.code_parser.utils import ( +from langflow.custom.code_parser.utils import ( extract_inner_type_from_generic_alias, extract_union_types_from_generic_alias, ) -from langflow.interface.custom.custom_component.component import Component +from langflow.custom.custom_component.component import Component +from langflow.helpers.flow import list_flows, load_flow, run_flow from langflow.schema import Record from langflow.schema.dotdict import dotdict from langflow.services.deps import get_storage_service, get_variable_service, session_scope diff --git a/src/backend/base/langflow/interface/custom/directory_reader/__init__.py b/src/backend/base/langflow/custom/directory_reader/__init__.py similarity index 100% rename from src/backend/base/langflow/interface/custom/directory_reader/__init__.py rename to src/backend/base/langflow/custom/directory_reader/__init__.py diff --git a/src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py b/src/backend/base/langflow/custom/directory_reader/directory_reader.py similarity index 98% rename from src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py rename to src/backend/base/langflow/custom/directory_reader/directory_reader.py index e9f3f6ceb..b9f55f21f 100644 --- a/src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py +++ b/src/backend/base/langflow/custom/directory_reader/directory_reader.py @@ -5,7 +5,7 @@ from pathlib import Path from loguru import logger -from langflow.interface.custom.custom_component import CustomComponent +from langflow.custom import CustomComponent class CustomComponentPathValueError(ValueError): @@ -67,7 +67,7 @@ 
class DirectoryReader: return len(file_content.strip()) == 0 def filter_loaded_components(self, data: dict, with_errors: bool) -> dict: - from langflow.interface.custom.utils import build_component + from langflow.custom.utils import build_component items = [] for menu in data["menu"]: diff --git a/src/backend/base/langflow/interface/custom/directory_reader/utils.py b/src/backend/base/langflow/custom/directory_reader/utils.py similarity index 98% rename from src/backend/base/langflow/interface/custom/directory_reader/utils.py rename to src/backend/base/langflow/custom/directory_reader/utils.py index 2772cb78c..ddd24d8f3 100644 --- a/src/backend/base/langflow/interface/custom/directory_reader/utils.py +++ b/src/backend/base/langflow/custom/directory_reader/utils.py @@ -1,6 +1,6 @@ from loguru import logger -from langflow.interface.custom.directory_reader import DirectoryReader +from langflow.custom.directory_reader import DirectoryReader from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode diff --git a/src/backend/base/langflow/interface/custom/eval.py b/src/backend/base/langflow/custom/eval.py similarity index 80% rename from src/backend/base/langflow/interface/custom/eval.py rename to src/backend/base/langflow/custom/eval.py index b36f10d92..baa202402 100644 --- a/src/backend/base/langflow/interface/custom/eval.py +++ b/src/backend/base/langflow/custom/eval.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Type from langflow.utils import validate if TYPE_CHECKING: - from langflow.interface.custom.custom_component import CustomComponent + from langflow.custom import CustomComponent def eval_custom_component_code(code: str) -> Type["CustomComponent"]: diff --git a/src/backend/base/langflow/interface/custom/schema.py b/src/backend/base/langflow/custom/schema.py similarity index 100% rename from src/backend/base/langflow/interface/custom/schema.py rename to src/backend/base/langflow/custom/schema.py diff --git 
a/src/backend/base/langflow/interface/custom/utils.py b/src/backend/base/langflow/custom/utils.py similarity index 97% rename from src/backend/base/langflow/interface/custom/utils.py rename to src/backend/base/langflow/custom/utils.py index 2935a363a..93f08f633 100644 --- a/src/backend/base/langflow/interface/custom/utils.py +++ b/src/backend/base/langflow/custom/utils.py @@ -10,17 +10,17 @@ from fastapi import HTTPException from loguru import logger from pydantic import BaseModel -from langflow.field_typing.range_spec import RangeSpec -from langflow.interface.custom.attributes import ATTR_FUNC_MAPPING -from langflow.interface.custom.code_parser.utils import extract_inner_type -from langflow.interface.custom.custom_component import CustomComponent -from langflow.interface.custom.directory_reader.utils import ( +from langflow.custom import CustomComponent +from langflow.custom.attributes import ATTR_FUNC_MAPPING +from langflow.custom.code_parser.utils import extract_inner_type +from langflow.custom.directory_reader.utils import ( build_custom_component_list_from_path, determine_component_name, merge_nested_dicts_with_renaming, ) -from langflow.interface.custom.eval import eval_custom_component_code -from langflow.interface.custom.schema import MissingDefault +from langflow.custom.eval import eval_custom_component_code +from langflow.custom.schema import MissingDefault +from langflow.field_typing.range_spec import RangeSpec from langflow.schema import dotdict from langflow.template.field.base import TemplateField from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode @@ -159,6 +159,11 @@ def add_new_custom_field( if field_type == "bool" and field_value is None: field_value = False + if field_type == "SecretStr": + field_config["password"] = True + field_config["load_from_db"] = True + field_config["input_types"] = ["Text"] + # If options is a list, then it's a dropdown # If options is None, then it's a list of strings is_list = 
isinstance(field_config.get("options"), list) diff --git a/src/backend/base/langflow/graph/__init__.py b/src/backend/base/langflow/graph/__init__.py index e80dcaa5f..bb93f92cf 100644 --- a/src/backend/base/langflow/graph/__init__.py +++ b/src/backend/base/langflow/graph/__init__.py @@ -1,39 +1,6 @@ from langflow.graph.edge.base import Edge from langflow.graph.graph.base import Graph from langflow.graph.vertex.base import Vertex -from langflow.graph.vertex.types import ( - AgentVertex, - ChainVertex, - CustomComponentVertex, - DocumentLoaderVertex, - EmbeddingVertex, - LLMVertex, - MemoryVertex, - PromptVertex, - RetrieverVertex, - TextSplitterVertex, - ToolkitVertex, - ToolVertex, - VectorStoreVertex, - WrapperVertex, -) +from langflow.graph.vertex.types import CustomComponentVertex, InterfaceVertex, StateVertex -__all__ = [ - "Graph", - "Vertex", - "Edge", - "AgentVertex", - "ChainVertex", - "DocumentLoaderVertex", - "EmbeddingVertex", - "LLMVertex", - "MemoryVertex", - "PromptVertex", - "TextSplitterVertex", - "ToolVertex", - "ToolkitVertex", - "VectorStoreVertex", - "WrapperVertex", - "RetrieverVertex", - "CustomComponentVertex", -] +__all__ = ["Edge", "Graph", "Vertex", "CustomComponentVertex", "InterfaceVertex", "StateVertex"] diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py index bd17d43f8..f4e71bd7c 100644 --- a/src/backend/base/langflow/graph/graph/base.py +++ b/src/backend/base/langflow/graph/graph/base.py @@ -14,11 +14,13 @@ from langflow.graph.graph.state_manager import GraphStateManager from langflow.graph.graph.utils import process_flow from langflow.graph.schema import InterfaceComponentTypes, RunOutputs from langflow.graph.vertex.base import Vertex -from langflow.graph.vertex.types import FileToolVertex, InterfaceVertex, LLMVertex, StateVertex, ToolkitVertex -from langflow.interface.tools.constants import FILE_TOOLS +from langflow.graph.vertex.types import InterfaceVertex, StateVertex from 
langflow.schema import Record from langflow.schema.schema import INPUT_FIELD_NAME, InputType +from langflow.services.cache.utils import CacheMiss +from langflow.services.chat.service import ChatService from langflow.services.deps import get_chat_service +from langflow.services.monitor.utils import log_transaction if TYPE_CHECKING: from langflow.graph.schema import ResultData @@ -687,16 +689,8 @@ class Graph: def _build_vertex_params(self) -> None: """Identifies and handles the LLM vertex within the graph.""" - llm_vertex = None for vertex in self.vertices: vertex._build_params() - if isinstance(vertex, LLMVertex): - llm_vertex = vertex - - if llm_vertex: - for vertex in self.vertices: - if isinstance(vertex, ToolkitVertex): - vertex.params["llm"] = llm_vertex def _validate_vertex(self, vertex: Vertex) -> bool: """Validates a vertex.""" @@ -713,7 +707,7 @@ class Graph: async def build_vertex( self, lock: asyncio.Lock, - set_cache_coro: Callable[["Graph", asyncio.Lock], Coroutine], + chat_service: ChatService, vertex_id: str, inputs_dict: Optional[Dict[str, str]] = None, user_id: Optional[str] = None, @@ -738,23 +732,43 @@ class Graph: """ vertex = self.get_vertex(vertex_id) try: - if not vertex.frozen or not vertex._built: + params = "" + if vertex.frozen: + # Check the cache for the vertex + cached_result = await chat_service.get_cache(key=vertex.id) + if isinstance(cached_result, CacheMiss): + await vertex.build(user_id=user_id, inputs=inputs_dict, fallback_to_env_vars=fallback_to_env_vars) + await chat_service.set_cache(key=vertex.id, data=vertex) + else: + cached_vertex = cached_result["result"] + # Now set update the vertex with the cached vertex + vertex._built = cached_vertex._built + vertex.result = cached_vertex.result + vertex.artifacts = cached_vertex.artifacts + vertex._built_object = cached_vertex._built_object + vertex._custom_component = cached_vertex._custom_component + if vertex.result is not None: + vertex.result.used_frozen_result = True + + else: 
await vertex.build(user_id=user_id, inputs=inputs_dict, fallback_to_env_vars=fallback_to_env_vars) if vertex.result is not None: - params = vertex._built_object_repr() + params = f"{vertex._built_object_repr()}{params}" valid = True result_dict = vertex.result artifacts = vertex.artifacts else: raise ValueError(f"No result found for vertex {vertex_id}") - + set_cache_coro = partial(chat_service.set_cache, key=self.flow_id) next_runnable_vertices, top_level_vertices = await self.get_next_and_top_level_vertices( lock, set_cache_coro, vertex ) + log_transaction(vertex, status="success") return next_runnable_vertices, top_level_vertices, result_dict, params, valid, artifacts, vertex except Exception as exc: logger.exception(f"Error building vertex: {exc}") + log_transaction(vertex, status="failure", error=str(exc)) raise exc async def get_next_and_top_level_vertices( @@ -819,11 +833,10 @@ class Graph: for vertex_id in current_batch: vertex = self.get_vertex(vertex_id) lock = chat_service._cache_locks[self.run_id] - set_cache_coro = partial(chat_service.set_cache, flow_id=self.run_id) task = asyncio.create_task( self.build_vertex( lock=lock, - set_cache_coro=set_cache_coro, + chat_service=chat_service, vertex_id=vertex_id, user_id=self.user_id, inputs_dict={}, @@ -1003,8 +1016,6 @@ class Graph: elif node_name in lazy_load_vertex_dict.VERTEX_TYPE_MAP: return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_name] - if node_type in FILE_TOOLS: - return FileToolVertex if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type] return ( diff --git a/src/backend/base/langflow/graph/graph/constants.py b/src/backend/base/langflow/graph/graph/constants.py index 2ed42fbff..8f5840524 100644 --- a/src/backend/base/langflow/graph/graph/constants.py +++ b/src/backend/base/langflow/graph/graph/constants.py @@ -1,17 +1,5 @@ from langflow.graph.schema import CHAT_COMPONENTS from langflow.graph.vertex import types -from 
langflow.interface.agents.base import agent_creator -from langflow.interface.custom.base import custom_component_creator -from langflow.interface.document_loaders.base import documentloader_creator -from langflow.interface.embeddings.base import embedding_creator -from langflow.interface.llms.base import llm_creator -from langflow.interface.memories.base import memory_creator -from langflow.interface.prompts.base import prompt_creator -from langflow.interface.retrievers.base import retriever_creator -from langflow.interface.text_splitters.base import textsplitter_creator -from langflow.interface.toolkits.base import toolkits_creator -from langflow.interface.tools.base import tool_creator -from langflow.interface.wrappers.base import wrapper_creator from langflow.utils.lazy_load import LazyLoadDictBase @@ -32,20 +20,7 @@ class VertexTypesDict(LazyLoadDictBase): def get_type_dict(self): return { - **{t: types.PromptVertex for t in prompt_creator.to_list()}, - **{t: types.AgentVertex for t in agent_creator.to_list()}, - # **{t: types.ChainVertex for t in chain_creator.to_list()}, - **{t: types.ToolVertex for t in tool_creator.to_list()}, - **{t: types.ToolkitVertex for t in toolkits_creator.to_list()}, - **{t: types.WrapperVertex for t in wrapper_creator.to_list()}, - **{t: types.LLMVertex for t in llm_creator.to_list()}, - **{t: types.MemoryVertex for t in memory_creator.to_list()}, - **{t: types.EmbeddingVertex for t in embedding_creator.to_list()}, - # **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()}, - **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()}, - **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()}, - **{t: types.CustomComponentVertex for t in custom_component_creator.to_list()}, - **{t: types.RetrieverVertex for t in retriever_creator.to_list()}, + **{t: types.CustomComponentVertex for t in ["CustomComponent"]}, **{t: types.InterfaceVertex for t in CHAT_COMPONENTS}, } diff --git 
a/src/backend/base/langflow/graph/schema.py b/src/backend/base/langflow/graph/schema.py index 18010de4c..60e7ab590 100644 --- a/src/backend/base/langflow/graph/schema.py +++ b/src/backend/base/langflow/graph/schema.py @@ -15,6 +15,7 @@ class ResultData(BaseModel): duration: Optional[str] = None component_display_name: Optional[str] = None component_id: Optional[str] = None + used_frozen_result: Optional[bool] = False @field_serializer("results") def serialize_results(self, value): diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py index e0dd0f4bf..963ae2115 100644 --- a/src/backend/base/langflow/graph/vertex/base.py +++ b/src/backend/base/langflow/graph/vertex/base.py @@ -10,7 +10,7 @@ from loguru import logger from langflow.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData from langflow.graph.utils import UnbuiltObject, UnbuiltResult -from langflow.graph.vertex.utils import generate_result, log_transaction +from langflow.graph.vertex.utils import log_transaction from langflow.interface.initialize import loading from langflow.interface.listing import lazy_load_dict from langflow.schema.schema import INPUT_FIELD_NAME @@ -454,29 +454,6 @@ class Vertex: ) self.set_result(result_dict) - async def _run( - self, - user_id: str, - inputs: Optional[dict] = None, - session_id: Optional[str] = None, - ): - # user_id is just for compatibility with the other build methods - inputs = inputs or {} - # inputs = {key: value or "" for key, value in inputs.items()} - # if hasattr(self._built_object, "input_keys"): - # # test if all keys are in inputs - # # and if not add them with empty string - # # for key in self._built_object.input_keys: - # # if key not in inputs: - # # inputs[key] = "" - # if inputs == {} and hasattr(self._built_object, "prompt"): - # inputs = self._built_object.prompt.partial_variables - if isinstance(self._built_object, str): - self._built_result = 
self._built_object - - result = await generate_result(self._built_object, inputs, self.has_external_output, session_id) - self._built_result = result - async def _build_each_vertex_in_params_dict(self, user_id=None): """ Iterates over each vertex in the params dictionary and builds it. @@ -719,7 +696,8 @@ class Vertex: self._finalize_build() - return await self.get_requester_result(requester) + result = await self.get_requester_result(requester) + return result async def get_requester_result(self, requester: Optional["Vertex"]): # If the requester is None, this means that diff --git a/src/backend/base/langflow/graph/vertex/types.py b/src/backend/base/langflow/graph/vertex/types.py index 941f06036..590c38c24 100644 --- a/src/backend/base/langflow/graph/vertex/types.py +++ b/src/backend/base/langflow/graph/vertex/types.py @@ -1,15 +1,13 @@ -import ast import json -from typing import AsyncIterator, Callable, Dict, Iterator, List, Optional, Union +from typing import AsyncIterator, Dict, Iterator, List import yaml from langchain_core.messages import AIMessage from loguru import logger from langflow.graph.schema import CHAT_COMPONENTS, RECORDS_COMPONENTS, InterfaceComponentTypes -from langflow.graph.utils import UnbuiltObject, flatten_list, serialize_field +from langflow.graph.utils import UnbuiltObject, serialize_field from langflow.graph.vertex.base import Vertex -from langflow.interface.utils import extract_input_variables_from_prompt from langflow.schema import Record from langflow.schema.schema import INPUT_FIELD_NAME from langflow.services.monitor.utils import log_vertex_build @@ -17,289 +15,6 @@ from langflow.utils.schemas import ChatOutputResponse, RecordOutputResponse from langflow.utils.util import unescape_string -class AgentVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="agents", params=params) - - self.tools: List[Union[ToolkitVertex, ToolVertex]] = [] - self.chains: 
List[ChainVertex] = [] - self.steps: List[Callable] = [self._custom_build] - - def __getstate__(self): - state = super().__getstate__() - state["tools"] = self.tools - state["chains"] = self.chains - return state - - def __setstate__(self, state): - self.tools = state["tools"] - self.chains = state["chains"] - super().__setstate__(state) - - def _set_tools_and_chains(self) -> None: - for edge in self.edges: - if not hasattr(edge, "source"): - continue - source_node = edge.source - if isinstance(source_node, (ToolVertex, ToolkitVertex)): - self.tools.append(source_node) - elif isinstance(source_node, ChainVertex): - self.chains.append(source_node) - - async def _custom_build(self, *args, **kwargs): - user_id = kwargs.get("user_id", None) - self._set_tools_and_chains() - # First, build the tools - for tool_node in self.tools: - await tool_node.build(user_id=user_id) - - # Next, build the chains and the rest - for chain_node in self.chains: - await chain_node.build(tools=self.tools, user_id=user_id) - - await self._build(user_id=user_id) - - -class ToolVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="tools", params=params) - - -class LLMVertex(Vertex): - built_node_type = None - class_built_object = None - - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="models", params=params) - self.steps: List[Callable] = [self._custom_build] - - async def _custom_build(self, *args, **kwargs): - # LLM is different because some models might take up too much memory - # or time to load. So we only load them when we need them. 
- # Avoid deepcopying the LLM - # that are loaded from a file - force = kwargs.get("force", False) - user_id = kwargs.get("user_id", None) - if self.vertex_type == self.built_node_type: - self._built_object = self.class_built_object - if not self._built or force: - await self._build(user_id=user_id) - self.built_node_type = self.vertex_type - self.class_built_object = self._built_object - - -class ToolkitVertex(Vertex): - def __init__(self, data: Dict, graph, params=None): - super().__init__(data, graph=graph, base_type="toolkits", params=params) - - -class FileToolVertex(ToolVertex): - def __init__(self, data: Dict, graph, params=None): - super().__init__( - data, - params=params, - graph=graph, - ) - - -class WrapperVertex(Vertex): - def __init__(self, data: Dict, graph, params=None): - super().__init__(data, graph=graph, base_type="wrappers") - self.steps: List[Callable] = [self._custom_build] - - async def _custom_build(self, *args, **kwargs): - force = kwargs.get("force", False) - user_id = kwargs.get("user_id", None) - if not self._built or force: - if "headers" in self.params: - self.params["headers"] = ast.literal_eval(self.params["headers"]) - await self._build(user_id=user_id) - - -class DocumentLoaderVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="documentloaders", params=params) - - def _built_object_repr(self): - # This built_object is a list of documents. Maybe we should - # show how many documents are in the list? - - if not isinstance(self._built_object, UnbuiltObject): - avg_length = sum(len(record.get_text()) for record in self._built_object if hasattr(record, "text")) / len( - self._built_object - ) - return f"""{self.display_name}({len(self._built_object)} records) - \nAvg. 
Record Length (characters): {int(avg_length)} - Records: {self._built_object[:3]}...""" - return f"{self.vertex_type}()" - - -class EmbeddingVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="embeddings", params=params) - - -class VectorStoreVertex(Vertex): - def __init__(self, data: Dict, graph, params=None): - super().__init__(data, graph=graph, base_type="vectorstores") - - self.params = params or {} - - # VectorStores may contain databse connections - # so we need to define the __reduce__ method and the __setstate__ method - # to avoid pickling errors - def clean_edges_for_pickling(self): - # for each edge that has self as source - # we need to clear the _built_object of the target - # so that we don't try to pickle a database connection - for edge in self.edges: - if edge.source == self: - edge.target._built_object = None - edge.target._built = False - edge.target.params[edge.target_param] = self - - def remove_docs_and_texts_from_params(self): - # remove documents and texts from params - # so that we don't try to pickle a database connection - self.params.pop("documents", None) - self.params.pop("texts", None) - - def __getstate__(self): - # We want to save the params attribute - # and if "documents" or "texts" are in the params - # we want to remove them because they have already - # been processed. 
- params = self.params.copy() - params.pop("documents", None) - params.pop("texts", None) - self.clean_edges_for_pickling() - - return super().__getstate__() - - def __setstate__(self, state): - super().__setstate__(state) - self.remove_docs_and_texts_from_params() - - -class MemoryVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="memory") - - -class RetrieverVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="retrievers") - - -class TextSplitterVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="textsplitters", params=params) - - def _built_object_repr(self): - # This built_object is a list of documents. Maybe we should - # show how many documents are in the list? - - if not isinstance(self._built_object, UnbuiltObject): - avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(self._built_object) - return f"""{self.vertex_type}({len(self._built_object)} documents) - \nAvg. 
Document Length (characters): {int(avg_length)} - \nDocuments: {self._built_object[:3]}...""" - return f"{self.vertex_type}()" - - -class ChainVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="chains") - self.steps = [self._custom_build] - - async def _custom_build(self, *args, **kwargs): - force = kwargs.get("force", False) - user_id = kwargs.get("user_id", None) - # Remove this once LLMChain is CustomComponent - self.params.pop("code", None) - for key, value in self.params.items(): - if isinstance(value, PromptVertex): - # Build the PromptVertex, passing the tools if available - tools = kwargs.get("tools", None) - self.params[key] = value.build(tools=tools, frozen=force) - - await self._build(user_id=user_id) - - def set_artifacts(self) -> None: - if isinstance(self._built_object, UnbuiltObject): - return - if self._built_object and hasattr(self._built_object, "input_keys"): - self.artifacts = dict(input_keys=self._built_object.input_keys) - - def _built_object_repr(self): - if isinstance(self._built_object, str): - return self._built_object - return super()._built_object_repr() - - -class PromptVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="prompts") - self.steps: List[Callable] = [self._custom_build] - - async def _custom_build(self, *args, **kwargs): - force = kwargs.get("force", False) - user_id = kwargs.get("user_id", None) - tools = kwargs.get("tools", []) - if not self._built or force: - if "input_variables" not in self.params or self.params["input_variables"] is None: - self.params["input_variables"] = [] - # Check if it is a ZeroShotPrompt and needs a tool - if "ShotPrompt" in self.vertex_type: - tools = [tool_node.build(user_id=user_id) for tool_node in tools] if tools is not None else [] - # flatten the list of tools if it is a list of lists - # first check if it is a list - if tools and isinstance(tools, list) and isinstance(tools[0], 
list): - tools = flatten_list(tools) - self.params["tools"] = tools - prompt_params = [ - key for key, value in self.params.items() if isinstance(value, str) and key != "format_instructions" - ] - else: - prompt_params = ["template"] - - if "prompt" not in self.params and "messages" not in self.params: - for param in prompt_params: - prompt_text = self.params[param] - variables = extract_input_variables_from_prompt(prompt_text) - self.params["input_variables"].extend(variables) - self.params["input_variables"] = list(set(self.params["input_variables"])) - elif isinstance(self.params, dict): - self.params.pop("input_variables", None) - - await self._build(user_id=user_id) - - def _built_object_repr(self): - if not self.artifacts or self._built_object is None or not hasattr(self._built_object, "format"): - return super()._built_object_repr() - elif isinstance(self._built_object, UnbuiltObject): - return super()._built_object_repr() - # We'll build the prompt with the artifacts - # to show the user what the prompt looks like - # with the variables filled in - artifacts = self.artifacts.copy() - # Remove the handle_keys from the artifacts - # so the prompt format doesn't break - artifacts.pop("handle_keys", None) - try: - if not hasattr(self._built_object, "template") and hasattr(self._built_object, "prompt"): - template = self._built_object.prompt.template - else: - template = self._built_object.template - for key, value in artifacts.items(): - if value: - replace_key = "{" + key + "}" - template = template.replace(replace_key, value) - return template if isinstance(template, str) else f"{self.vertex_type}({template})" - except KeyError: - return str(self._built_object) - - class CustomComponentVertex(Vertex): def __init__(self, data: Dict, graph): super().__init__(data, graph=graph, base_type="custom_components") diff --git a/src/backend/base/langflow/graph/vertex/utils.py b/src/backend/base/langflow/graph/vertex/utils.py index b978424f2..59a1c1949 100644 --- 
a/src/backend/base/langflow/graph/vertex/utils.py +++ b/src/backend/base/langflow/graph/vertex/utils.py @@ -1,74 +1,13 @@ -from typing import Any, Optional, Union, TYPE_CHECKING +from typing import TYPE_CHECKING -from langchain_core.messages import BaseMessage -from langchain_core.runnables import Runnable from loguru import logger from langflow.services.deps import get_monitor_service -from langflow.utils.constants import PYTHON_BASIC_TYPES if TYPE_CHECKING: from langflow.graph.vertex.base import Vertex -def is_basic_type(obj): - return type(obj) in PYTHON_BASIC_TYPES - - -async def invoke_lc_runnable( - built_object: Runnable, inputs: dict, has_external_output: bool, session_id: Optional[str] = None, **kwargs -) -> Union[str, BaseMessage]: - # Setup callbacks for asynchronous execution - from langflow.processing.base import setup_callbacks - - callbacks = setup_callbacks(sync=False, trace_id=session_id, **kwargs) - - try: - if has_external_output and hasattr(built_object, "astream"): - # Asynchronous stream handling if supported and required - output = "" - async for chunk in built_object.astream(inputs, {"callbacks": callbacks}): - output += chunk - return output - else: - # Direct asynchronous invocation - return await built_object.ainvoke(inputs, {"callbacks": callbacks}) - except Exception as async_exc: - logger.debug(f"Async error, falling back to sync: {str(async_exc)}") - - # Setup synchronous callbacks for the fallback - sync_callbacks = setup_callbacks(sync=True, trace_id=session_id, **kwargs) - try: - # Synchronous fallback if asynchronous execution fails - if has_external_output and hasattr(built_object, "stream"): - # Synchronous stream handling if supported and required - output = "" - for chunk in built_object.stream(inputs, {"callbacks": sync_callbacks}): - output += chunk - return output - else: - # Direct synchronous invocation - return built_object.invoke(inputs, {"callbacks": sync_callbacks}) - except Exception as sync_exc: - 
logger.error(f"Sync error after async failure: {str(sync_exc)}") - # Handle or re-raise exception as appropriate for your application - raise sync_exc from async_exc - - -async def generate_result(built_object: Any, inputs: dict, has_external_output: bool, session_id: Optional[str] = None): - # If the built_object is instance of Runnable - # we can call `invoke` or `stream` on it - # if it has_external_outputl, we need to call `stream` if it has it - # if not, we call `invoke` if it has it - if isinstance(built_object, Runnable): - result = await invoke_lc_runnable( - built_object=built_object, inputs=inputs, has_external_output=has_external_output, session_id=session_id - ) - else: - result = built_object - return result - - def build_clean_params(target: "Vertex") -> dict: """ Cleans the parameters of the target vertex. diff --git a/src/backend/base/langflow/helpers/flow.py b/src/backend/base/langflow/helpers/flow.py index 36b852a99..0f2a1e170 100644 --- a/src/backend/base/langflow/helpers/flow.py +++ b/src/backend/base/langflow/helpers/flow.py @@ -1,10 +1,14 @@ from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple, Type, Union, cast +from uuid import UUID -from langflow.schema.schema import INPUT_FIELD_NAME, Record -from langflow.services.database.models.flow.model import Flow -from langflow.services.deps import session_scope +from fastapi import Depends, HTTPException from pydantic.v1 import BaseModel, Field, create_model -from sqlmodel import select +from sqlmodel import Session, select + +from langflow.graph.schema import RunOutputs +from langflow.schema.schema import INPUT_FIELD_NAME, Record +from langflow.services.database.models.flow import Flow +from langflow.services.deps import get_session, session_scope if TYPE_CHECKING: from langflow.graph.graph.base import Graph @@ -51,7 +55,7 @@ async def load_flow( raise ValueError(f"Flow {flow_id} not found") if tweaks: graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks) - 
graph = Graph.from_payload(graph_data, flow_id=flow_id) + graph = Graph.from_payload(graph_data, flow_id=flow_id, user_id=user_id) return graph @@ -67,25 +71,29 @@ async def run_flow( flow_id: Optional[str] = None, flow_name: Optional[str] = None, user_id: Optional[str] = None, -) -> Any: +) -> List[RunOutputs]: if user_id is None: raise ValueError("Session is invalid") graph = await load_flow(user_id, flow_id, flow_name, tweaks) if inputs is None: inputs = [] + if isinstance(inputs, dict): + inputs = [inputs] inputs_list = [] inputs_components = [] types = [] for input_dict in inputs: inputs_list.append({INPUT_FIELD_NAME: cast(str, input_dict.get("input_value"))}) inputs_components.append(input_dict.get("components", [])) - types.append(input_dict.get("type", [])) + types.append(input_dict.get("type", "chat")) return await graph.arun(inputs_list, inputs_components=inputs_components, types=types) -def generate_function_for_flow(inputs: List["Vertex"], flow_id: str) -> Callable[..., Awaitable[Any]]: +def generate_function_for_flow( + inputs: List["Vertex"], flow_id: str, user_id: str | UUID | None +) -> Callable[..., Awaitable[Any]]: """ Generate a dynamic flow function based on the given inputs and flow ID. 
@@ -129,11 +137,23 @@ async def flow_function({func_args}): tweaks = {{ {arg_mappings} }} from langflow.helpers.flow import run_flow from langchain_core.tools import ToolException + from langflow.base.flow_processing.utils import build_records_from_result_data, format_flow_output_records try: - return await run_flow( + run_outputs = await run_flow( tweaks={{key: {{'input_value': value}} for key, value in tweaks.items()}}, flow_id="{flow_id}", + user_id="{user_id}" ) + if not run_outputs: + return [] + run_output = run_outputs[0] + + records = [] + if run_output is not None: + for output in run_output.outputs: + if output: + records.extend(build_records_from_result_data(output, get_final_results_only=True)) + return format_flow_output_records(records) except Exception as e: raise ToolException(f'Error running flow: ' + e) """ @@ -145,7 +165,7 @@ async def flow_function({func_args}): def build_function_and_schema( - flow_record: Record, graph: "Graph" + flow_record: Record, graph: "Graph", user_id: str | UUID | None ) -> Tuple[Callable[..., Awaitable[Any]], Type[BaseModel]]: """ Builds a dynamic function and schema for a given flow. @@ -159,7 +179,7 @@ def build_function_and_schema( """ flow_id = flow_record.id inputs = get_flow_inputs(graph) - dynamic_flow_function = generate_function_for_flow(inputs, flow_id) + dynamic_flow_function = generate_function_for_flow(inputs, flow_id, user_id=user_id) schema = build_schema_from_inputs(flow_record.name, inputs) return dynamic_flow_function, schema @@ -200,3 +220,38 @@ def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> Type[BaseMode description = input_.description fields[field_name] = (str, Field(default="", description=description)) return create_model(name, **fields) # type: ignore + + +def get_arg_names(inputs: List["Vertex"]) -> List[dict[str, str]]: + """ + Returns a list of dictionaries containing the component name and its corresponding argument name. 
+ + Args: + inputs (List[Vertex]): A list of Vertex objects representing the inputs. + + Returns: + List[dict[str, str]]: A list of dictionaries, where each dictionary contains the component name and its argument name. + """ + return [ + {"component_name": input_.display_name, "arg_name": input_.display_name.lower().replace(" ", "_")} + for input_ in inputs + ] + + +def get_flow_by_id_or_endpoint_name( + flow_id_or_name: str, db: Session = Depends(get_session), user_id: Optional[UUID] = None +) -> Flow: + endpoint_name = None + try: + flow_id = UUID(flow_id_or_name) + flow = db.get(Flow, flow_id) + except ValueError: + endpoint_name = flow_id_or_name + stmt = select(Flow).where(Flow.name == endpoint_name) + if user_id: + stmt = stmt.where(Flow.user_id == user_id) + flow = db.exec(stmt).first() + if flow is None: + raise HTTPException(status_code=404, detail=f"Flow identifier {flow_id_or_name} not found") + + return flow diff --git a/src/backend/base/langflow/initial_setup/setup.py b/src/backend/base/langflow/initial_setup/setup.py index 3c976cb94..27574950c 100644 --- a/src/backend/base/langflow/initial_setup/setup.py +++ b/src/backend/base/langflow/initial_setup/setup.py @@ -1,7 +1,10 @@ +import logging +import os from collections import defaultdict from copy import deepcopy from datetime import datetime, timezone from pathlib import Path +from uuid import UUID import orjson from emoji import demojize, purely_emoji # type: ignore @@ -10,10 +13,16 @@ from sqlmodel import select from langflow.base.constants import FIELD_FORMAT_ATTRIBUTES, NODE_FORMAT_ATTRIBUTES from langflow.interface.types import get_all_components +from langflow.services.auth.utils import create_super_user from langflow.services.database.models.flow.model import Flow, FlowCreate from langflow.services.database.models.folder.model import Folder, FolderCreate +from langflow.services.database.models.user.crud import get_user_by_username from langflow.services.deps import get_settings_service, 
session_scope +from langflow.services.database.models.folder.utils import create_default_folder_if_it_doesnt_exist +from langflow.services.deps import get_settings_service, session_scope, get_variable_service + + + STARTER_FOLDER_NAME = "Starter Projects" STARTER_FOLDER_DESCRIPTION = "Starter projects to help you get started in Langflow." @@ -159,7 +168,7 @@ def create_new_project( project_data, project_icon, project_icon_bg_color, - new_folder_id + new_folder_id, ): logger.debug(f"Creating starter project {project_name}") new_project = FlowCreate( @@ -205,8 +214,65 @@ def create_starter_folder(session): return session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first() +def _is_valid_uuid(val): + try: + uuid_obj = UUID(val) + except ValueError: + return False + return str(uuid_obj) == val + +def load_flows_from_directory(): + settings_service = get_settings_service() + flows_path = settings_service.settings.load_flows_path + if not flows_path: + return + if not settings_service.auth_settings.AUTO_LOGIN: + logging.warning("AUTO_LOGIN is disabled, not loading flows from directory") + return + + with session_scope() as session: + user_id = get_user_by_username(session, settings_service.auth_settings.SUPERUSER).id + files = [f for f in os.listdir(flows_path) if os.path.isfile(os.path.join(flows_path, f))] + for filename in files: + if not filename.endswith(".json"): + continue + logger.info(f"Loading flow from file: {filename}") + with open(os.path.join(flows_path, filename), "r", encoding="utf-8") as file: + flow = orjson.loads(file.read()) + no_json_name = filename.replace(".json", "") + flow_endpoint_name = flow.get("endpoint_name") + if _is_valid_uuid(no_json_name): + flow["id"] = no_json_name + flow_id = flow.get("id") + + existing = find_existing_flow(session, flow_id, flow_endpoint_name) + if existing: + logger.info(f"Updating existing flow: {flow_id} with endpoint name {flow_endpoint_name}") + for key, value in flow.items(): + 
setattr(existing, key, value) + existing.updated_at = datetime.utcnow() + existing.user_id = user_id + session.add(existing) + session.commit() + else: + logger.info(f"Creating new flow: {flow_id} with endpoint name {flow_endpoint_name}") + flow["user_id"] = user_id + flow = Flow.model_validate(flow, from_attributes=True) + flow.updated_at = datetime.utcnow() + session.add(flow) + session.commit() + +def find_existing_flow(session, flow_id, flow_endpoint_name): + if flow_endpoint_name: + stmt = select(Flow).where(Flow.endpoint_name == flow_endpoint_name) + if existing := session.exec(stmt).first(): + return existing + stmt = select(Flow).where(Flow.id == flow_id) + if existing := session.exec(stmt).first(): + return existing + return None def create_or_update_starter_projects(): - components_paths = get_settings_service().settings.COMPONENTS_PATH + components_paths = get_settings_service().settings.components_path try: all_types_dict = get_all_components(components_paths, as_dict=True) except Exception as e: @@ -247,5 +313,22 @@ def create_or_update_starter_projects(): project_data, project_icon, project_icon_bg_color, - new_folder.id + new_folder.id, ) + + +def initialize_super_user_if_needed(): + settings_service = get_settings_service() + if not settings_service.auth_settings.AUTO_LOGIN: + return + username = settings_service.auth_settings.SUPERUSER + password = settings_service.auth_settings.SUPERUSER_PASSWORD + if not username or not password: + raise ValueError("SUPERUSER and SUPERUSER_PASSWORD must be set in the settings if AUTO_LOGIN is true.") + + with session_scope() as session: + super_user = create_super_user(db=session, username=username, password=password) + get_variable_service().initialize_user_variables(super_user.id, session) + create_default_folder_if_it_doesnt_exist(session, super_user.id) + session.commit() + logger.info("Super user initialized") diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, 
world!).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json index e001f8e41..c56a8304c 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json @@ -20,7 +20,7 @@ "list": false, "show": true, "multiline": true, - "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n 
template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", "fileTypes": [], "file_path": "", "password": false, @@ -149,7 +149,7 @@ "list": false, "show": true, "multiline": true, - "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n 
\"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n 
temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", "fileTypes": [], "file_path": "", "password": false, @@ -175,7 +175,7 @@ "display_name": "Max Tokens", "advanced": true, "dynamic": false, - "info": "", + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", "load_from_db": false, "title_case": false }, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json index e70285000..d2c8cf951 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json @@ -20,7 +20,7 @@ "list": false, "show": true, "multiline": true, - "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "value": "from langchain_core.prompts import 
PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", "fileTypes": [], "file_path": "", "password": false, @@ -182,7 +182,7 @@ "list": false, "show": true, "multiline": true, - "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n", + "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.custom import CustomComponent\nfrom langflow.schema 
import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n", "fileTypes": [], "file_path": "", "password": false, @@ -453,7 +453,7 @@ "list": false, "show": true, "multiline": true, - "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n 
\"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n 
temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", "fileTypes": [], "file_path": "", "password": false, @@ -479,7 +479,7 @@ "display_name": "Max Tokens", "advanced": true, "dynamic": false, - "info": "", + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", "load_from_db": false, "title_case": false }, @@ -694,7 +694,7 @@ "list": false, "show": true, "multiline": true, - "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n", + "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.custom import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n", "fileTypes": [], "file_path": "", "password": false, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json 
b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json index 5d3ab5a1b..32933e0d6 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json @@ -20,7 +20,7 @@ "list": false, "show": true, "multiline": true, - "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n 
from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", "fileTypes": [], "file_path": "", "password": false, @@ -598,7 +598,7 @@ "list": false, "show": true, "multiline": true, - "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n 
\"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n 
temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", "fileTypes": [], "file_path": "", "password": false, @@ -624,7 +624,7 @@ "display_name": "Max Tokens", "advanced": true, "dynamic": false, - "info": "", + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", "load_from_db": false, "title_case": false }, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json index ef45db37d..9e51846be 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json @@ -524,7 +524,7 @@ "list": false, "show": true, "multiline": true, - "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "value": "from 
langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", "fileTypes": [], "file_path": "", "password": false, @@ -679,7 +679,7 @@ "list": false, "show": true, "multiline": true, - "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n 
\"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom 
langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", "fileTypes": [], "file_path": "", "password": false, @@ -705,7 +705,7 @@ "display_name": "Max Tokens", "advanced": true, "dynamic": false, - "info": "", + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", "load_from_db": false, "title_case": false }, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json index 8563a442a..9269eeed0 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json @@ -20,7 +20,7 @@ "list": false, "show": true, "multiline": true, - "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def 
build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", "fileTypes": [], "file_path": "", "password": false, @@ -130,7 +130,7 @@ "list": false, "show": true, "multiline": true, - "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import 
CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", "fileTypes": [], "file_path": "", "password": false, @@ -798,7 +798,7 @@ "list": false, "show": true, "multiline": true, - "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model 
Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom 
langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not 
openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", "fileTypes": [], "file_path": "", "password": false, @@ -824,7 +824,7 @@ "display_name": "Max Tokens", "advanced": true, "dynamic": false, - "info": "", + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", "load_from_db": false, "title_case": false }, @@ -1155,7 +1155,7 @@ "list": false, "show": true, "multiline": true, - "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n 
\"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n 
temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", "fileTypes": [], "file_path": "", "password": false, @@ -1181,7 +1181,7 @@ "display_name": "Max Tokens", "advanced": true, "dynamic": false, - "info": "", + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", "load_from_db": false, "title_case": false }, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json index 097fdbbc2..654f0771b 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json @@ -1,3407 +1,3151 @@ { - "id": "51e2b78a-199b-4054-9f32-e288eef6924c", - "data": { - "nodes": [ - { - "id": "ChatInput-yxMKE", - "type": "genericNode", - "position": { - "x": 1195.5276981160775, - "y": 209.421875 - }, - "data": { - "type": "ChatInput", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return 
super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "input_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Message", - "advanced": false, - "input_types": [], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "value": "what is a line" - }, - "return_record": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "return_record", - "display_name": "Return Record", - "advanced": true, - "dynamic": false, - "info": "Return the message as a record containing the sender, sender_name, and session_id.", - "load_from_db": false, - "title_case": false - }, - "sender": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "User", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Machine", - "User" - ], - "name": "sender", - "display_name": "Sender Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "sender_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "User", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "sender_name", - "display_name": "Sender Name", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - 
"title_case": false, - "input_types": [ - "Text" - ] - }, - "session_id": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "session_id", - "display_name": "Session ID", - "advanced": true, - "dynamic": false, - "info": "If provided, the message will be stored in the memory.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Get chat inputs from the Playground.", - "icon": "ChatInput", - "base_classes": [ - "Text", - "str", - "object", - "Record" - ], - "display_name": "Chat Input", - "documentation": "", - "custom_fields": { - "sender": null, - "sender_name": null, - "input_value": null, - "session_id": null, - "return_record": null - }, - "output_types": [ - "Text", - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "ChatInput-yxMKE" - }, - "selected": false, - "width": 384, - "height": 383 + "id": "51e2b78a-199b-4054-9f32-e288eef6924c", + "data": { + "nodes": [ + { + "id": "ChatInput-yxMKE", + "type": "genericNode", + "position": { + "x": 1195.5276981160775, + "y": 209.421875 + }, + "data": { + "type": "ChatInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return 
build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "value": "what is a line" + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Machine", "User"], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + 
"show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" }, - { - "id": "TextOutput-BDknO", - "type": "genericNode", - "position": { - "x": 2322.600672827879, - "y": 604.9467307442569 - }, - "data": { - "type": "TextOutput", - "node": { - "template": { - "input_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Value", - "advanced": false, - "input_types": [ - "Record", - "Text" - ], - "dynamic": false, - "info": "Text or Record to be passed as output.", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as 
output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "record_template": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "{text}", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "record_template", - "display_name": "Record Template", - "advanced": true, - "dynamic": false, - "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Display a text output in the Playground.", - "icon": "type", - "base_classes": [ - "object", - "Text", - "str" - ], - "display_name": "Extracted Chunks", - "documentation": "", - "custom_fields": { - "input_value": null, - "record_template": null - }, - "output_types": [ - "Text" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "TextOutput-BDknO" - }, - "selected": false, - "width": 384, - "height": 289, - "positionAbsolute": { - "x": 2322.600672827879, - "y": 604.9467307442569 - }, - "dragging": false + "description": "Get chat inputs from the Playground.", + "icon": "ChatInput", + "base_classes": ["Text", "str", "object", "Record"], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": 
null, + "input_value": null, + "session_id": null, + "return_record": null }, - { - "id": "OpenAIEmbeddings-ZlOk1", - "type": "genericNode", - "position": { - "x": 1183.667250865064, - "y": 687.3171828430261 - }, - "data": { - "type": "OpenAIEmbeddings", - "node": { - "template": { - "allowed_special": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": [], - "fileTypes": [], - "file_path": "", - "password": false, - "name": "allowed_special", - "display_name": "Allowed Special", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "chunk_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 1000, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "chunk_size", - "display_name": "Chunk Size", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "client": { - "type": "Any", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "client", - "display_name": "Client", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def 
build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n 
\"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n 
skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "default_headers": { - "type": "dict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "default_headers", - "display_name": "Default Headers", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "default_query": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "default_query", - "display_name": "Default Query", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "deployment": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "text-embedding-ada-002", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "deployment", - "display_name": "Deployment", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "disallowed_special": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": [ - "all" - ], - "fileTypes": [], - "file_path": "", - "password": false, - "name": "disallowed_special", - "display_name": "Disallowed Special", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "embedding_ctx_length": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - 
"multiline": false, - "value": 8191, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "embedding_ctx_length", - "display_name": "Embedding Context Length", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "max_retries": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 6, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "max_retries", - "display_name": "Max Retries", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "model": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "text-embedding-ada-002", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "text-embedding-3-small", - "text-embedding-3-large", - "text-embedding-ada-002" - ], - "name": "model", - "display_name": "Model", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "model_kwargs": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "model_kwargs", - "display_name": "Model Kwargs", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "openai_api_base": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_base", - "display_name": "OpenAI API Base", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_key": { - "type": "str", - "required": 
true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_key", - "display_name": "OpenAI API Key", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "OPENAI_API_KEY" - }, - "openai_api_type": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_type", - "display_name": "OpenAI API Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_version": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_api_version", - "display_name": "OpenAI API Version", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_organization": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_organization", - "display_name": "OpenAI Organization", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_proxy": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_proxy", - "display_name": "OpenAI Proxy", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - 
"request_timeout": { - "type": "float", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "request_timeout", - "display_name": "Request Timeout", - "advanced": true, - "dynamic": false, - "info": "", - "rangeSpec": { - "step_type": "float", - "min": -1, - "max": 1, - "step": 0.1 - }, - "load_from_db": false, - "title_case": false - }, - "show_progress_bar": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "show_progress_bar", - "display_name": "Show Progress Bar", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "skip_empty": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "skip_empty", - "display_name": "Skip Empty", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "tiktoken_enable": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "tiktoken_enable", - "display_name": "TikToken Enable", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "tiktoken_model_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "tiktoken_model_name", - "display_name": "TikToken Model Name", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ 
- "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Generate embeddings using OpenAI models.", - "base_classes": [ - "Embeddings" - ], - "display_name": "OpenAI Embeddings", - "documentation": "", - "custom_fields": { - "openai_api_key": null, - "default_headers": null, - "default_query": null, - "allowed_special": null, - "disallowed_special": null, - "chunk_size": null, - "client": null, - "deployment": null, - "embedding_ctx_length": null, - "max_retries": null, - "model": null, - "model_kwargs": null, - "openai_api_base": null, - "openai_api_type": null, - "openai_api_version": null, - "openai_organization": null, - "openai_proxy": null, - "request_timeout": null, - "show_progress_bar": null, - "skip_empty": null, - "tiktoken_enable": null, - "tiktoken_model_name": null - }, - "output_types": [ - "Embeddings" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "OpenAIEmbeddings-ZlOk1" - }, - "selected": false, - "width": 384, - "height": 383, - "dragging": false + "output_types": ["Text", "Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatInput-yxMKE" + }, + "selected": false, + "width": 384, + "height": 383 + }, + { + "id": "TextOutput-BDknO", + "type": "genericNode", + "position": { + "x": 2322.600672827879, + "y": 604.9467307442569 + }, + "data": { + "type": "TextOutput", + "node": { + "template": { + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": ["Record", "Text"], + "dynamic": false, + "info": "Text or Record to be passed as output.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, 
+ "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Playground.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" }, - { - "id": "OpenAIModel-EjXlN", - "type": "genericNode", - "position": { - "x": 3410.117202077183, - "y": 431.2038048137648 - }, - "data": { - "type": "OpenAIModel", - "node": { - "template": { - "input_value": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Input", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": 
\"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "max_tokens": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 256, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "max_tokens", - "display_name": "Max Tokens", - "advanced": true, - "dynamic": false, - "info": "", - 
"load_from_db": false, - "title_case": false - }, - "model_kwargs": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "model_kwargs", - "display_name": "Model Kwargs", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "model_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "gpt-3.5-turbo", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "name": "model_name", - "display_name": "Model Name", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_base": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_api_base", - "display_name": "OpenAI API Base", - "advanced": true, - "dynamic": false, - "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_key": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_key", - "display_name": "OpenAI API Key", - "advanced": false, - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "OPENAI_API_KEY" - }, - "stream": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "stream", - "display_name": "Stream", - "advanced": true, - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "load_from_db": false, - "title_case": false - }, - "system_message": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "system_message", - "display_name": "System Message", - "advanced": true, - "dynamic": false, - "info": "System message to pass to the model.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "temperature": { - "type": "float", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 0.1, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "temperature", - "display_name": "Temperature", - "advanced": false, - "dynamic": false, - "info": "", - "rangeSpec": { - "step_type": "float", - "min": -1, - "max": 1, - "step": 0.1 - }, - "load_from_db": false, - "title_case": false - }, - "_type": "CustomComponent" - }, - "description": "Generates text using OpenAI LLMs.", - "icon": "OpenAI", - "base_classes": [ - "object", - "Text", - "str" - ], - "display_name": "OpenAI", - "documentation": "", - "custom_fields": { - "input_value": null, - "openai_api_key": null, - "temperature": null, - "model_name": null, - "max_tokens": null, - "model_kwargs": null, - "openai_api_base": null, - "stream": null, - "system_message": null - }, - "output_types": [ - "Text" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [ - "max_tokens", - "model_kwargs", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "input_value", - "system_message", - "stream" - ], - "beta": false - }, - "id": "OpenAIModel-EjXlN" - }, - "selected": true, - "width": 384, - "height": 563, - "positionAbsolute": { - "x": 3410.117202077183, - "y": 431.2038048137648 - }, - "dragging": false + "description": "Display a text output in the Playground.", + "icon": "type", + "base_classes": 
["object", "Text", "str"], + "display_name": "Extracted Chunks", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null }, - { - "id": "Prompt-xeI6K", - "type": "genericNode", - "position": { - "x": 2969.0261961391298, - "y": 442.1613649809069 + "output_types": ["Text"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextOutput-BDknO" + }, + "selected": false, + "width": 384, + "height": 289, + "positionAbsolute": { + "x": 2322.600672827879, + "y": 604.9467307442569 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-ZlOk1", + "type": "genericNode", + "position": { + "x": 1183.667250865064, + "y": 687.3171828430261 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": true, + "value": "from typing import Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, NestedDict\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n 
},\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n 
chunk_size=chunk_size,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": ["all"], 
+ "fileTypes": [], + "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + 
"placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "OPENAI_API_KEY" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_proxy": { + "type": "str", + 
"required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 }, - "data": { - "type": "Prompt", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return 
formated_prompt\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "template": { - "type": "prompt", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "template", - "display_name": "Template", - "advanced": false, - "input_types": [ - "Text" - ], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "_type": "CustomComponent", - "context": { - "field_type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "context", - "display_name": "context", - "advanced": false, - "input_types": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "type": "str" - }, - "question": { - "field_type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "question", - "display_name": "question", - "advanced": false, - "input_types": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "type": "str" - } - }, - "description": "Create a prompt template with dynamic variables.", - "icon": "prompts", - "is_input": null, - "is_output": null, - "is_composition": null, - "base_classes": [ - "object", - "Text", - "str" - ], - "name": "", - "display_name": "Prompt", - "documentation": "", - "custom_fields": { - "template": [ 
- "context", - "question" - ] - }, - "output_types": [ - "Text" - ], - "full_path": null, - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false, - "error": null - }, - "id": "Prompt-xeI6K", - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt" - }, - "selected": false, - "width": 384, - "height": 477, - "positionAbsolute": { - "x": 2969.0261961391298, - "y": 442.1613649809069 - }, - "dragging": false + "load_from_db": false, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "skip_empty": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_enable", + "display_name": "TikToken Enable", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": true, + "dynamic": false, + 
"info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" }, - { - "id": "ChatOutput-Q39I8", - "type": "genericNode", - "position": { - "x": 3887.2073667611485, - "y": 588.4801225794856 - }, - "data": { - "type": "ChatOutput", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "input_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Message", - "advanced": false, - "input_types": [ - "Text" - ], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "record_template": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": 
"{text}", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "record_template", - "display_name": "Record Template", - "advanced": true, - "dynamic": false, - "info": "In case of Message being a Record, this template will be used to convert it to text.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "return_record": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "return_record", - "display_name": "Return Record", - "advanced": true, - "dynamic": false, - "info": "Return the message as a record containing the sender, sender_name, and session_id.", - "load_from_db": false, - "title_case": false - }, - "sender": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Machine", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Machine", - "User" - ], - "name": "sender", - "display_name": "Sender Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "sender_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "AI", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "sender_name", - "display_name": "Sender Name", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "session_id": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "session_id", - "display_name": "Session ID", - "advanced": true, - "dynamic": false, - "info": "If provided, the message will 
be stored in the memory.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Display a chat message in the Playground.", - "icon": "ChatOutput", - "base_classes": [ - "object", - "Text", - "Record", - "str" - ], - "display_name": "Chat Output", - "documentation": "", - "custom_fields": { - "sender": null, - "sender_name": null, - "input_value": null, - "session_id": null, - "return_record": null, - "record_template": null - }, - "output_types": [ - "Text", - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "ChatOutput-Q39I8" - }, - "selected": false, - "width": 384, - "height": 383, - "positionAbsolute": { - "x": 3887.2073667611485, - "y": 588.4801225794856 - }, - "dragging": false + "description": "Generate embeddings using OpenAI models.", + "base_classes": ["Embeddings"], + "display_name": "OpenAI Embeddings", + "documentation": "", + "custom_fields": { + "openai_api_key": null, + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + "openai_api_base": null, + "openai_api_type": null, + "openai_api_version": null, + "openai_organization": null, + "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null }, - { - "id": "File-t0a6a", - "type": "genericNode", - "position": { - "x": 2257.233450682836, - "y": 1747.5389618367233 + "output_types": ["Embeddings"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "OpenAIEmbeddings-ZlOk1" + }, + "selected": false, + "width": 384, + "height": 383, + "dragging": false + }, + { + "id": "OpenAIModel-EjXlN", + "type": 
"genericNode", + "position": { + "x": 3410.117202077183, + "y": 431.2038048137648 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n \"info\": \"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": MODEL_NAMES,\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str = \"gpt-4o\",\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "OPENAI_API_KEY" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.1, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 }, - "data": { - "type": "File", - "node": { - "template": { - "path": { - "type": 
"file", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [ - ".txt", - ".md", - ".mdx", - ".csv", - ".json", - ".yaml", - ".yml", - ".xml", - ".html", - ".htm", - ".pdf", - ".docx", - ".py", - ".sh", - ".sql", - ".js", - ".ts", - ".tsx" - ], - "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf", - "password": false, - "name": "path", - "display_name": "Path", - "advanced": false, - "dynamic": false, - "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", - "load_from_db": false, - "title_case": false, - "value": "" - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. 
Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "silent_errors": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "silent_errors", - "display_name": "Silent Errors", - "advanced": true, - "dynamic": false, - "info": "If true, errors will not raise an exception.", - "load_from_db": false, - "title_case": false - }, - "_type": "CustomComponent" - }, - "description": "A generic file loader.", - "icon": "file-text", - "base_classes": [ - "Record" - ], - "display_name": "File", - "documentation": "", - "custom_fields": { - "path": null, - "silent_errors": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "File-t0a6a" - }, - "selected": false, - "width": 384, - "height": 281, - "positionAbsolute": { - "x": 2257.233450682836, - "y": 1747.5389618367233 - }, - "dragging": false + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" }, - { - "id": "RecursiveCharacterTextSplitter-tR9QM", - "type": "genericNode", - "position": { - "x": 2791.013514133929, - "y": 1462.9588953494142 - }, - "data": { - "type": "RecursiveCharacterTextSplitter", - "node": { - "template": { - "inputs": { - "type": "Document", - "required": true, - "placeholder": "", - 
"list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "inputs", - "display_name": "Input", - "advanced": false, - "input_types": [ - "Document", - "Record" - ], - "dynamic": false, - "info": "The texts to split.", - "load_from_db": false, - "title_case": false - }, - "chunk_overlap": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 200, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "chunk_overlap", - "display_name": "Chunk Overlap", - "advanced": false, - "dynamic": false, - "info": "The amount of overlap between chunks.", - "load_from_db": false, - "title_case": false - }, - "chunk_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 1000, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "chunk_size", - "display_name": "Chunk Size", - "advanced": false, - "dynamic": false, - "info": "The maximum length of each chunk.", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\nfrom langchain_text_splitters import RecursiveCharacterTextSplitter\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n 
\"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = 
self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "separators": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "separators", - "display_name": "Separators", - "advanced": false, - "dynamic": false, - "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": [ - "" - ] - }, - "_type": "CustomComponent" - }, - "description": "Split text into chunks of a specified length.", - "base_classes": [ - "Record" - ], - "display_name": "Recursive Character Text Splitter", - "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter", - "custom_fields": { - "inputs": null, - "separators": null, - "chunk_size": null, - "chunk_overlap": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "RecursiveCharacterTextSplitter-tR9QM" - }, - "selected": false, - "width": 384, - "height": 501, - "positionAbsolute": { - "x": 2791.013514133929, - "y": 1462.9588953494142 - }, - "dragging": false + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": ["object", "Text", "str"], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null }, - { - "id": "AstraDBSearch-41nRz", - "type": "genericNode", - "position": 
{ - "x": 1723.976434815103, - "y": 277.03317407245913 - }, - "data": { - "type": "AstraDBSearch", - "node": { - "template": { - "embedding": { - "type": "Embeddings", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "embedding", - "display_name": "Embedding", - "advanced": false, - "dynamic": false, - "info": "Embedding to use", - "load_from_db": false, - "title_case": false - }, - "input_value": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Input Value", - "advanced": false, - "dynamic": false, - "info": "Input value to search", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "api_endpoint": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "api_endpoint", - "display_name": "API Endpoint", - "advanced": false, - "dynamic": false, - "info": "API endpoint URL for the Astra DB service.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "ASTRA_DB_API_ENDPOINT" - }, - "batch_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "batch_size", - "display_name": "Batch Size", - "advanced": true, - "dynamic": false, - "info": "Optional number of records to process in a single batch.", - "load_from_db": false, - "title_case": false - }, - "bulk_delete_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - 
"name": "bulk_delete_concurrency", - "display_name": "Bulk Delete Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk delete operations.", - "load_from_db": false, - "title_case": false - }, - "bulk_insert_batch_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_insert_batch_concurrency", - "display_name": "Bulk Insert Batch Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk insert operations.", - "load_from_db": false, - "title_case": false - }, - "bulk_insert_overwrite_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_insert_overwrite_concurrency", - "display_name": "Bulk Insert Overwrite Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": 
\"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", 
\"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n 
bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "collection_indexing_policy": { - "type": "dict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "collection_indexing_policy", - "display_name": "Collection Indexing Policy", - "advanced": true, - "dynamic": false, - "info": "Optional dictionary defining the indexing policy for the collection.", - "load_from_db": false, - "title_case": false - }, - "collection_name": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "collection_name", - "display_name": "Collection Name", - "advanced": false, - "dynamic": false, - "info": "The name of the collection within Astra DB where the vectors will be stored.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "langflow" - }, - "metadata_indexing_exclude": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - 
"multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metadata_indexing_exclude", - "display_name": "Metadata Indexing Exclude", - "advanced": true, - "dynamic": false, - "info": "Optional list of metadata fields to exclude from the indexing.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "metadata_indexing_include": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metadata_indexing_include", - "display_name": "Metadata Indexing Include", - "advanced": true, - "dynamic": false, - "info": "Optional list of metadata fields to include in the indexing.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "metric": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metric", - "display_name": "Metric", - "advanced": true, - "dynamic": false, - "info": "Optional distance metric for vector comparisons in the vector store.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "namespace": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "namespace", - "display_name": "Namespace", - "advanced": true, - "dynamic": false, - "info": "Optional namespace within Astra DB to use for the collection.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "number_of_results": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 4, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "number_of_results", - 
"display_name": "Number of Results", - "advanced": true, - "dynamic": false, - "info": "Number of results to return.", - "load_from_db": false, - "title_case": false - }, - "pre_delete_collection": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "pre_delete_collection", - "display_name": "Pre Delete Collection", - "advanced": true, - "dynamic": false, - "info": "Boolean flag to determine whether to delete the collection before creating a new one.", - "load_from_db": false, - "title_case": false - }, - "search_type": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Similarity", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Similarity", - "MMR" - ], - "name": "search_type", - "display_name": "Search Type", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "setup_mode": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Sync", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Sync", - "Async", - "Off" - ], - "name": "setup_mode", - "display_name": "Setup Mode", - "advanced": true, - "dynamic": false, - "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "token": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "token", - "display_name": "Token", - "advanced": false, - "dynamic": false, - "info": "Authentication 
token for accessing Astra DB.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "ASTRA_DB_APPLICATION_TOKEN" - }, - "_type": "CustomComponent" - }, - "description": "Searches an existing Astra DB Vector Store.", - "icon": "AstraDB", - "base_classes": [ - "Record" - ], - "display_name": "Astra DB Search", - "documentation": "", - "custom_fields": { - "embedding": null, - "collection_name": null, - "input_value": null, - "token": null, - "api_endpoint": null, - "search_type": null, - "number_of_results": null, - "namespace": null, - "metric": null, - "batch_size": null, - "bulk_insert_batch_concurrency": null, - "bulk_insert_overwrite_concurrency": null, - "bulk_delete_concurrency": null, - "setup_mode": null, - "pre_delete_collection": null, - "metadata_indexing_include": null, - "metadata_indexing_exclude": null, - "collection_indexing_policy": null - }, - "output_types": [ - "Record" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [ - "token", - "api_endpoint", - "collection_name", - "input_value", - "embedding" - ], - "beta": false - }, - "id": "AstraDBSearch-41nRz" - }, - "selected": false, - "width": 384, - "height": 713, - "dragging": false, - "positionAbsolute": { - "x": 1723.976434815103, - "y": 277.03317407245913 - } + "output_types": ["Text"], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-EjXlN" + }, + "selected": true, + "width": 384, + "height": 563, + "positionAbsolute": { + "x": 3410.117202077183, + "y": 431.2038048137648 + }, + "dragging": false + }, + { + "id": "Prompt-xeI6K", + "type": "genericNode", + "position": { + "x": 2969.0261961391298, + "y": 442.1613649809069 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + 
"required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": ["Text"], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "context": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": 
true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "context", + "display_name": "context", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + }, + "question": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "question", + "display_name": "question", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } }, - { - "id": "AstraDB-eUCSS", - "type": "genericNode", - "position": { - "x": 3372.04958055989, - "y": 1611.0742035495277 - }, - "data": { - "type": "AstraDB", - "node": { - "template": { - "embedding": { - "type": "Embeddings", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "embedding", - "display_name": "Embedding", - "advanced": false, - "dynamic": false, - "info": "Embedding to use", - "load_from_db": false, - "title_case": false - }, - "inputs": { - "type": "Record", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "inputs", - "display_name": "Inputs", - "advanced": false, - "dynamic": false, - "info": "Optional list of records to be processed and stored in the vector store.", - "load_from_db": false, - "title_case": false - }, - "api_endpoint": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "api_endpoint", - 
"display_name": "API Endpoint", - "advanced": false, - "dynamic": false, - "info": "API endpoint URL for the Astra DB service.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "ASTRA_DB_API_ENDPOINT" - }, - "batch_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "batch_size", - "display_name": "Batch Size", - "advanced": true, - "dynamic": false, - "info": "Optional number of records to process in a single batch.", - "load_from_db": false, - "title_case": false - }, - "bulk_delete_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_delete_concurrency", - "display_name": "Bulk Delete Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk delete operations.", - "load_from_db": false, - "title_case": false - }, - "bulk_insert_batch_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_insert_batch_concurrency", - "display_name": "Bulk Insert Batch Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk insert operations.", - "load_from_db": false, - "title_case": false - }, - "bulk_insert_overwrite_concurrency": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "bulk_insert_overwrite_concurrency", - "display_name": "Bulk Insert Overwrite Concurrency", - "advanced": true, - "dynamic": false, - "info": "Optional concurrency level for bulk insert operations that 
overwrite existing records.", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import List, Optional, Union\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\nfrom langchain_core.retrievers import BaseRetriever\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n 
\"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n 
bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> Union[VectorStore, BaseRetriever]:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n", - "fileTypes": [], - "file_path": "", - "password": 
false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "collection_indexing_policy": { - "type": "dict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "collection_indexing_policy", - "display_name": "Collection Indexing Policy", - "advanced": true, - "dynamic": false, - "info": "Optional dictionary defining the indexing policy for the collection.", - "load_from_db": false, - "title_case": false - }, - "collection_name": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "collection_name", - "display_name": "Collection Name", - "advanced": false, - "dynamic": false, - "info": "The name of the collection within Astra DB where the vectors will be stored.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "langflow" - }, - "metadata_indexing_exclude": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metadata_indexing_exclude", - "display_name": "Metadata Indexing Exclude", - "advanced": true, - "dynamic": false, - "info": "Optional list of metadata fields to exclude from the indexing.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "metadata_indexing_include": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metadata_indexing_include", - "display_name": "Metadata Indexing Include", - "advanced": true, - "dynamic": false, - "info": "Optional list of metadata fields to include in the indexing.", - 
"load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "metric": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "metric", - "display_name": "Metric", - "advanced": true, - "dynamic": false, - "info": "Optional distance metric for vector comparisons in the vector store.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "namespace": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "namespace", - "display_name": "Namespace", - "advanced": true, - "dynamic": false, - "info": "Optional namespace within Astra DB to use for the collection.", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "pre_delete_collection": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "pre_delete_collection", - "display_name": "Pre Delete Collection", - "advanced": true, - "dynamic": false, - "info": "Boolean flag to determine whether to delete the collection before creating a new one.", - "load_from_db": false, - "title_case": false - }, - "setup_mode": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Sync", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "Sync", - "Async", - "Off" - ], - "name": "setup_mode", - "display_name": "Setup Mode", - "advanced": true, - "dynamic": false, - "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.", - "load_from_db": false, - "title_case": 
false, - "input_types": [ - "Text" - ] - }, - "token": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "token", - "display_name": "Token", - "advanced": false, - "dynamic": false, - "info": "Authentication token for accessing Astra DB.", - "load_from_db": true, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "ASTRA_DB_APPLICATION_TOKEN" - }, - "_type": "CustomComponent" - }, - "description": "Builds or loads an Astra DB Vector Store.", - "icon": "AstraDB", - "base_classes": [ - "VectorStore" - ], - "display_name": "Astra DB", - "documentation": "", - "custom_fields": { - "embedding": null, - "token": null, - "api_endpoint": null, - "collection_name": null, - "inputs": null, - "namespace": null, - "metric": null, - "batch_size": null, - "bulk_insert_batch_concurrency": null, - "bulk_insert_overwrite_concurrency": null, - "bulk_delete_concurrency": null, - "setup_mode": null, - "pre_delete_collection": null, - "metadata_indexing_include": null, - "metadata_indexing_exclude": null, - "collection_indexing_policy": null - }, - "output_types": [ - "VectorStore" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [ - "token", - "api_endpoint", - "collection_name", - "inputs", - "embedding" - ], - "beta": false - }, - "id": "AstraDB-eUCSS" - }, - "selected": false, - "width": 384, - "height": 573, - "positionAbsolute": { - "x": 3372.04958055989, - "y": 1611.0742035495277 - }, - "dragging": false + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": ["object", "Text", "str"], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": ["context", "question"] }, - { - "id": "OpenAIEmbeddings-9TPjc", - "type": "genericNode", - "position": { - "x": 
2814.0402191223047, - "y": 1955.9268168273086 - }, - "data": { - "type": "OpenAIEmbeddings", - "node": { - "template": { - "allowed_special": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": [], - "fileTypes": [], - "file_path": "", - "password": false, - "name": "allowed_special", - "display_name": "Allowed Special", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "chunk_size": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 1000, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "chunk_size", - "display_name": "Chunk Size", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "client": { - "type": "Any", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "client", - "display_name": "Client", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n 
\"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n 
\"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - 
"dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "default_headers": { - "type": "dict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "default_headers", - "display_name": "Default Headers", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "default_query": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "default_query", - "display_name": "Default Query", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "deployment": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "text-embedding-ada-002", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "deployment", - "display_name": "Deployment", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "disallowed_special": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": [ - "all" - ], - "fileTypes": [], - "file_path": "", - "password": false, - "name": "disallowed_special", - "display_name": "Disallowed Special", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "embedding_ctx_length": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 8191, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "embedding_ctx_length", - "display_name": "Embedding Context Length", - 
"advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "max_retries": { - "type": "int", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": 6, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "max_retries", - "display_name": "Max Retries", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "model": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "text-embedding-ada-002", - "fileTypes": [], - "file_path": "", - "password": false, - "options": [ - "text-embedding-3-small", - "text-embedding-3-large", - "text-embedding-ada-002" - ], - "name": "model", - "display_name": "Model", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "model_kwargs": { - "type": "NestedDict", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": {}, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "model_kwargs", - "display_name": "Model Kwargs", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "openai_api_base": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_base", - "display_name": "OpenAI API Base", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_key": { - "type": "str", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_key", - 
"display_name": "OpenAI API Key", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ], - "value": "OPENAI_API_KEY" - }, - "openai_api_type": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": true, - "name": "openai_api_type", - "display_name": "OpenAI API Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_api_version": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_api_version", - "display_name": "OpenAI API Version", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_organization": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_organization", - "display_name": "OpenAI Organization", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "openai_proxy": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "openai_proxy", - "display_name": "OpenAI Proxy", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "request_timeout": { - "type": "float", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - 
"password": false, - "name": "request_timeout", - "display_name": "Request Timeout", - "advanced": true, - "dynamic": false, - "info": "", - "rangeSpec": { - "step_type": "float", - "min": -1, - "max": 1, - "step": 0.1 - }, - "load_from_db": false, - "title_case": false - }, - "show_progress_bar": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "show_progress_bar", - "display_name": "Show Progress Bar", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "skip_empty": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "skip_empty", - "display_name": "Skip Empty", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "tiktoken_enable": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "tiktoken_enable", - "display_name": "TikToken Enable", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "tiktoken_model_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "tiktoken_model_name", - "display_name": "TikToken Model Name", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": [ - "Text" - ] - }, - "_type": "CustomComponent" - }, - "description": "Generate embeddings using OpenAI models.", - "base_classes": [ - "Embeddings" - ], - "display_name": 
"OpenAI Embeddings", - "documentation": "", - "custom_fields": { - "openai_api_key": null, - "default_headers": null, - "default_query": null, - "allowed_special": null, - "disallowed_special": null, - "chunk_size": null, - "client": null, - "deployment": null, - "embedding_ctx_length": null, - "max_retries": null, - "model": null, - "model_kwargs": null, - "openai_api_base": null, - "openai_api_type": null, - "openai_api_version": null, - "openai_organization": null, - "openai_proxy": null, - "request_timeout": null, - "show_progress_bar": null, - "skip_empty": null, - "tiktoken_enable": null, - "tiktoken_model_name": null - }, - "output_types": [ - "Embeddings" - ], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "OpenAIEmbeddings-9TPjc" - }, - "selected": false, - "width": 384, - "height": 383, - "positionAbsolute": { - "x": 2814.0402191223047, - "y": 1955.9268168273086 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "TextOutput-BDknO", - "target": "Prompt-xeI6K", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "id": "reactflow__edge-TextOutput-BDknO{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "context", - "id": 
"Prompt-xeI6K", - "inputTypes": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], - "dataType": "TextOutput", - "id": "TextOutput-BDknO" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "output_types": ["Text"], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-xeI6K", + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 477, + "positionAbsolute": { + "x": 2969.0261961391298, + "y": 442.1613649809069 + }, + "dragging": false + }, + { + "id": "ChatOutput-Q39I8", + "type": "genericNode", + "position": { + "x": 3887.2073667611485, + "y": 588.4801225794856 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n", + "fileTypes": [], + "file_path": "", + 
"password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": ["Text"], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Machine", "User"], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" }, - { - "source": "ChatInput-yxMKE", - "target": "Prompt-xeI6K", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "question", - "id": "Prompt-xeI6K", - "inputTypes": [ - "Document", - "BaseOutputParser", - "Record", - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "Text", 
- "str", - "object", - "Record" - ], - "dataType": "ChatInput", - "id": "ChatInput-yxMKE" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "description": "Display a chat message in the Playground.", + "icon": "ChatOutput", + "base_classes": ["object", "Text", "Record", "str"], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null, + "record_template": null }, - { - "source": "Prompt-xeI6K", - "target": "OpenAIModel-EjXlN", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "id": "reactflow__edge-Prompt-xeI6K{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}-OpenAIModel-EjXlN{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": "OpenAIModel-EjXlN", - "inputTypes": [ - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], - "dataType": "Prompt", - "id": "Prompt-xeI6K" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "output_types": ["Text", "Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-Q39I8" + }, + "selected": false, + "width": 384, + "height": 383, + 
"positionAbsolute": { + "x": 3887.2073667611485, + "y": 588.4801225794856 + }, + "dragging": false + }, + { + "id": "File-t0a6a", + "type": "genericNode", + "position": { + "x": 2257.233450682836, + "y": 1747.5389618367233 + }, + "data": { + "type": "File", + "node": { + "template": { + "path": { + "type": "file", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [ + ".txt", + ".md", + ".mdx", + ".csv", + ".json", + ".yaml", + ".yml", + ".xml", + ".html", + ".htm", + ".pdf", + ".docx", + ".py", + ".sh", + ".sql", + ".js", + ".ts", + ".tsx" + ], + "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow conversation.pdf", + "password": false, + "name": "path", + "display_name": "Path", + "advanced": false, + "dynamic": false, + "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", + "load_from_db": false, + "title_case": false, + "value": "" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.custom import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n 
path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "silent_errors": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "silent_errors", + "display_name": "Silent Errors", + "advanced": true, + "dynamic": false, + "info": "If true, errors will not raise an exception.", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" }, - { - "source": "OpenAIModel-EjXlN", - "target": "ChatOutput-Q39I8", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "id": 
"reactflow__edge-OpenAIModel-EjXlN{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}-ChatOutput-Q39I8{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-Q39I8", - "inputTypes": [ - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "object", - "Text", - "str" - ], - "dataType": "OpenAIModel", - "id": "OpenAIModel-EjXlN" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "description": "A generic file loader.", + "icon": "file-text", + "base_classes": ["Record"], + "display_name": "File", + "documentation": "", + "custom_fields": { + "path": null, + "silent_errors": null }, - { - "source": "File-t0a6a", - "target": "RecursiveCharacterTextSplitter-tR9QM", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}", - "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}", - "id": "reactflow__edge-File-t0a6a{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}-RecursiveCharacterTextSplitter-tR9QM{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}", - "data": { - "targetHandle": { - "fieldName": "inputs", - "id": "RecursiveCharacterTextSplitter-tR9QM", - "inputTypes": [ - "Document", - 
"Record" - ], - "type": "Document" - }, - "sourceHandle": { - "baseClasses": [ - "Record" - ], - "dataType": "File", - "id": "File-t0a6a" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "selected": false + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "File-t0a6a" + }, + "selected": false, + "width": 384, + "height": 281, + "positionAbsolute": { + "x": 2257.233450682836, + "y": 1747.5389618367233 + }, + "dragging": false + }, + { + "id": "RecursiveCharacterTextSplitter-tR9QM", + "type": "genericNode", + "position": { + "x": 2791.013514133929, + "y": 1462.9588953494142 + }, + "data": { + "type": "RecursiveCharacterTextSplitter", + "node": { + "template": { + "inputs": { + "type": "Document", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "inputs", + "display_name": "Input", + "advanced": false, + "input_types": ["Document", "Record"], + "dynamic": false, + "info": "The texts to split.", + "load_from_db": false, + "title_case": false + }, + "chunk_overlap": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 200, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_overlap", + "display_name": "Chunk Overlap", + "advanced": false, + "dynamic": false, + "info": "The amount of overlap between chunks.", + "load_from_db": false, + "title_case": false + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": false, + "dynamic": false, + "info": "The maximum length of each chunk.", + "load_from_db": false, + 
"title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_core.documents import Document\nfrom langchain_text_splitters import RecursiveCharacterTextSplitter\n\nfrom langflow.custom import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate 
the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = build_loader_repr_from_records(records)\n return records\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "separators": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "separators", + "display_name": "Separators", + "advanced": false, + "dynamic": false, + "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": [""] + }, + "_type": "CustomComponent" }, - { - "source": "OpenAIEmbeddings-ZlOk1", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}", - "target": "AstraDBSearch-41nRz", - "targetHandle": 
"{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", - "data": { - "targetHandle": { - "fieldName": "embedding", - "id": "AstraDBSearch-41nRz", - "inputTypes": null, - "type": "Embeddings" - }, - "sourceHandle": { - "baseClasses": [ - "Embeddings" - ], - "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-ZlOk1" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}" + "description": "Split text into chunks of a specified length.", + "base_classes": ["Record"], + "display_name": "Recursive Character Text Splitter", + "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter", + "custom_fields": { + "inputs": null, + "separators": null, + "chunk_size": null, + "chunk_overlap": null }, - { - "source": "ChatInput-yxMKE", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}", - "target": "AstraDBSearch-41nRz", - "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": "AstraDBSearch-41nRz", - "inputTypes": [ - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "Text", - "str", - "object", - "Record" - ], - "dataType": "ChatInput", 
- "id": "ChatInput-yxMKE" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}" + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "RecursiveCharacterTextSplitter-tR9QM" + }, + "selected": false, + "width": 384, + "height": 501, + "positionAbsolute": { + "x": 2791.013514133929, + "y": 1462.9588953494142 + }, + "dragging": false + }, + { + "id": "AstraDBSearch-41nRz", + "type": "genericNode", + "position": { + "x": 1723.976434815103, + "y": 277.03317407245913 + }, + "data": { + "type": "AstraDBSearch", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "Embedding to use", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input Value", + "advanced": false, + "dynamic": false, + "info": "Input value to search", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "api_endpoint": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + 
"file_path": "", + "password": false, + "name": "api_endpoint", + "display_name": "API Endpoint", + "advanced": false, + "dynamic": false, + "info": "API endpoint URL for the Astra DB service.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "ASTRA_DB_API_ENDPOINT" + }, + "batch_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "batch_size", + "display_name": "Batch Size", + "advanced": true, + "dynamic": false, + "info": "Optional number of records to process in a single batch.", + "load_from_db": false, + "title_case": false + }, + "bulk_delete_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_delete_concurrency", + "display_name": "Bulk Delete Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk delete operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_batch_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_batch_concurrency", + "display_name": "Bulk Insert Batch Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_overwrite_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_overwrite_concurrency", + "display_name": "Bulk Insert Overwrite Concurrency", + "advanced": true, + "dynamic": false, + "info": 
"Optional concurrency level for bulk insert operations that overwrite existing records.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional 
number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n 
token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if \"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. 
Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "collection_indexing_policy": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_indexing_policy", + "display_name": "Collection Indexing Policy", + "advanced": true, + "dynamic": false, + "info": "Optional dictionary defining the indexing policy for the collection.", + "load_from_db": false, + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "langflow" + }, + "metadata_indexing_exclude": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_exclude", + "display_name": "Metadata Indexing Exclude", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "metadata_indexing_include": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_include", + "display_name": "Metadata Indexing 
Include", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to include in the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "metric": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metric", + "display_name": "Metric", + "advanced": true, + "dynamic": false, + "info": "Optional distance metric for vector comparisons in the vector store.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "namespace": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "namespace", + "display_name": "Namespace", + "advanced": true, + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "number_of_results": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 4, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "number_of_results", + "display_name": "Number of Results", + "advanced": true, + "dynamic": false, + "info": "Number of results to return.", + "load_from_db": false, + "title_case": false + }, + "pre_delete_collection": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "pre_delete_collection", + "display_name": "Pre Delete Collection", + "advanced": true, + "dynamic": false, + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "load_from_db": false, + "title_case": false + }, + "search_type": 
{ + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Similarity", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Similarity", "MMR"], + "name": "search_type", + "display_name": "Search Type", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "setup_mode": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Sync", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Sync", "Async", "Off"], + "name": "setup_mode", + "display_name": "Setup Mode", + "advanced": true, + "dynamic": false, + "info": "Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "token": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "token", + "display_name": "Token", + "advanced": false, + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "ASTRA_DB_APPLICATION_TOKEN" + }, + "_type": "CustomComponent" }, - { - "source": "RecursiveCharacterTextSplitter-tR9QM", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}", - "target": "AstraDB-eUCSS", - "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}", - "data": { - "targetHandle": { - "fieldName": "inputs", - "id": 
"AstraDB-eUCSS", - "inputTypes": null, - "type": "Record" - }, - "sourceHandle": { - "baseClasses": [ - "Record" - ], - "dataType": "RecursiveCharacterTextSplitter", - "id": "RecursiveCharacterTextSplitter-tR9QM" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}", - "selected": false + "description": "Searches an existing Astra DB Vector Store.", + "icon": "AstraDB", + "base_classes": ["Record"], + "display_name": "Astra DB Search", + "documentation": "", + "custom_fields": { + "embedding": null, + "collection_name": null, + "input_value": null, + "token": null, + "api_endpoint": null, + "search_type": null, + "number_of_results": null, + "namespace": null, + "metric": null, + "batch_size": null, + "bulk_insert_batch_concurrency": null, + "bulk_insert_overwrite_concurrency": null, + "bulk_delete_concurrency": null, + "setup_mode": null, + "pre_delete_collection": null, + "metadata_indexing_include": null, + "metadata_indexing_exclude": null, + "collection_indexing_policy": null }, - { - "source": "OpenAIEmbeddings-9TPjc", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}", - "target": "AstraDB-eUCSS", - "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", - "data": { - "targetHandle": { - "fieldName": "embedding", - "id": "AstraDB-eUCSS", - "inputTypes": null, - "type": "Embeddings" - }, - 
"sourceHandle": { - "baseClasses": [ - "Embeddings" - ], - "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-9TPjc" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", - "selected": false - }, - { - "source": "AstraDBSearch-41nRz", - "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}", - "target": "TextOutput-BDknO", - "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": "TextOutput-BDknO", - "inputTypes": [ - "Record", - "Text" - ], - "type": "str" - }, - "sourceHandle": { - "baseClasses": [ - "Record" - ], - "dataType": "AstraDBSearch", - "id": "AstraDBSearch-41nRz" - } - }, - "style": { - "stroke": "#555" - }, - "className": "stroke-gray-900 stroke-connection", - "id": "reactflow__edge-AstraDBSearch-41nRz{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}-TextOutput-BDknO{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}" - } - ], - "viewport": { - "x": -259.6782520315529, - "y": 90.3428735006047, - "zoom": 0.2687057134854984 + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + 
"field_order": [ + "token", + "api_endpoint", + "collection_name", + "input_value", + "embedding" + ], + "beta": false + }, + "id": "AstraDBSearch-41nRz" + }, + "selected": false, + "width": 384, + "height": 713, + "dragging": false, + "positionAbsolute": { + "x": 1723.976434815103, + "y": 277.03317407245913 } - }, - "description": "Visit https://pre-release.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.", - "name": "Vector Store RAG", - "last_tested_version": "1.0.0a0", - "is_component": false + }, + { + "id": "AstraDB-eUCSS", + "type": "genericNode", + "position": { + "x": 3372.04958055989, + "y": 1611.0742035495277 + }, + "data": { + "type": "AstraDB", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "Embedding to use", + "load_from_db": false, + "title_case": false + }, + "inputs": { + "type": "Record", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "inputs", + "display_name": "Inputs", + "advanced": false, + "dynamic": false, + "info": "Optional list of records to be processed and stored in the vector store.", + "load_from_db": false, + "title_case": false + }, + 
"api_endpoint": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "api_endpoint", + "display_name": "API Endpoint", + "advanced": false, + "dynamic": false, + "info": "API endpoint URL for the Astra DB service.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "ASTRA_DB_API_ENDPOINT" + }, + "batch_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "batch_size", + "display_name": "Batch Size", + "advanced": true, + "dynamic": false, + "info": "Optional number of records to process in a single batch.", + "load_from_db": false, + "title_case": false + }, + "bulk_delete_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_delete_concurrency", + "display_name": "Bulk Delete Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk delete operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_batch_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_batch_concurrency", + "display_name": "Bulk Insert Batch Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_overwrite_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + 
"name": "bulk_insert_overwrite_concurrency", + "display_name": "Bulk Insert Overwrite Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional, Union\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\nfrom langchain_core.retrievers import BaseRetriever\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in 
the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: 
Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> Union[VectorStore, BaseRetriever]:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n 
metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "collection_indexing_policy": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_indexing_policy", + "display_name": "Collection Indexing Policy", + "advanced": true, + "dynamic": false, + "info": "Optional dictionary defining the indexing policy for the collection.", + "load_from_db": false, + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "langflow" + }, + "metadata_indexing_exclude": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_exclude", + "display_name": "Metadata Indexing Exclude", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "metadata_indexing_include": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": 
"metadata_indexing_include", + "display_name": "Metadata Indexing Include", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to include in the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "metric": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metric", + "display_name": "Metric", + "advanced": true, + "dynamic": false, + "info": "Optional distance metric for vector comparisons in the vector store.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "namespace": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "namespace", + "display_name": "Namespace", + "advanced": true, + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "pre_delete_collection": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "pre_delete_collection", + "display_name": "Pre Delete Collection", + "advanced": true, + "dynamic": false, + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "load_from_db": false, + "title_case": false + }, + "setup_mode": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Sync", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Sync", "Async", "Off"], + "name": "setup_mode", + "display_name": "Setup Mode", + "advanced": true, + "dynamic": false, + "info": 
"Configuration mode for setting up the vector store, with options like \u201cSync\u201d, \u201cAsync\u201d, or \u201cOff\u201d.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "token": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "token", + "display_name": "Token", + "advanced": false, + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "ASTRA_DB_APPLICATION_TOKEN" + }, + "_type": "CustomComponent" + }, + "description": "Builds or loads an Astra DB Vector Store.", + "icon": "AstraDB", + "base_classes": ["VectorStore"], + "display_name": "Astra DB", + "documentation": "", + "custom_fields": { + "embedding": null, + "token": null, + "api_endpoint": null, + "collection_name": null, + "inputs": null, + "namespace": null, + "metric": null, + "batch_size": null, + "bulk_insert_batch_concurrency": null, + "bulk_insert_overwrite_concurrency": null, + "bulk_delete_concurrency": null, + "setup_mode": null, + "pre_delete_collection": null, + "metadata_indexing_include": null, + "metadata_indexing_exclude": null, + "collection_indexing_policy": null + }, + "output_types": ["VectorStore"], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "token", + "api_endpoint", + "collection_name", + "inputs", + "embedding" + ], + "beta": false + }, + "id": "AstraDB-eUCSS" + }, + "selected": false, + "width": 384, + "height": 573, + "positionAbsolute": { + "x": 3372.04958055989, + "y": 1611.0742035495277 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-9TPjc", + "type": "genericNode", + "position": { + "x": 2814.0402191223047, + "y": 1955.9268168273086 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, 
+ "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, NestedDict\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": 
\"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] 
= {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": 
false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": ["all"], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + 
"password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": true, + "title_case": false, + "input_types": ["Text"], + "value": "OPENAI_API_KEY" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + 
"multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": false, + 
"placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "skip_empty": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_enable", + "display_name": "TikToken Enable", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Generate embeddings using OpenAI models.", + "base_classes": ["Embeddings"], + "display_name": "OpenAI Embeddings", + "documentation": "", + "custom_fields": { + "openai_api_key": null, + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + 
"openai_api_base": null, + "openai_api_type": null, + "openai_api_version": null, + "openai_organization": null, + "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null + }, + "output_types": ["Embeddings"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "OpenAIEmbeddings-9TPjc" + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": 2814.0402191223047, + "y": 1955.9268168273086 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "TextOutput-BDknO", + "target": "Prompt-xeI6K", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "id": "reactflow__edge-TextOutput-BDknO{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153TextOutput\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153context\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "context", + "id": "Prompt-xeI6K", + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["object", "Text", "str"], + "dataType": "TextOutput", + "id": "TextOutput-BDknO" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": 
"ChatInput-yxMKE", + "target": "Prompt-xeI6K", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-Prompt-xeI6K{\u0153fieldName\u0153:\u0153question\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153BaseOutputParser\u0153,\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "question", + "id": "Prompt-xeI6K", + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Text", "str", "object", "Record"], + "dataType": "ChatInput", + "id": "ChatInput-yxMKE" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "Prompt-xeI6K", + "target": "OpenAIModel-EjXlN", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "id": 
"reactflow__edge-Prompt-xeI6K{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153Prompt\u0153,\u0153id\u0153:\u0153Prompt-xeI6K\u0153}-OpenAIModel-EjXlN{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-EjXlN", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["object", "Text", "str"], + "dataType": "Prompt", + "id": "Prompt-xeI6K" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIModel-EjXlN", + "target": "ChatOutput-Q39I8", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "id": "reactflow__edge-OpenAIModel-EjXlN{\u0153baseClasses\u0153:[\u0153object\u0153,\u0153Text\u0153,\u0153str\u0153],\u0153dataType\u0153:\u0153OpenAIModel\u0153,\u0153id\u0153:\u0153OpenAIModel-EjXlN\u0153}-ChatOutput-Q39I8{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153ChatOutput-Q39I8\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-Q39I8", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["object", "Text", "str"], + "dataType": "OpenAIModel", + "id": "OpenAIModel-EjXlN" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": 
"File-t0a6a", + "target": "RecursiveCharacterTextSplitter-tR9QM", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}", + "targetHandle": "{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}", + "id": "reactflow__edge-File-t0a6a{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153File\u0153,\u0153id\u0153:\u0153File-t0a6a\u0153}-RecursiveCharacterTextSplitter-tR9QM{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153,\u0153inputTypes\u0153:[\u0153Document\u0153,\u0153Record\u0153],\u0153type\u0153:\u0153Document\u0153}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "RecursiveCharacterTextSplitter-tR9QM", + "inputTypes": ["Document", "Record"], + "type": "Document" + }, + "sourceHandle": { + "baseClasses": ["Record"], + "dataType": "File", + "id": "File-t0a6a" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIEmbeddings-ZlOk1", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}", + "target": "AstraDBSearch-41nRz", + "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDBSearch-41nRz", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": ["Embeddings"], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-ZlOk1" + } + }, + "style": { + "stroke": "#555" + }, + "className": 
"stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIEmbeddings-ZlOk1{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-ZlOk1\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}" + }, + { + "source": "ChatInput-yxMKE", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}", + "target": "AstraDBSearch-41nRz", + "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "AstraDBSearch-41nRz", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Text", "str", "object", "Record"], + "dataType": "ChatInput", + "id": "ChatInput-yxMKE" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-ChatInput-yxMKE{\u0153baseClasses\u0153:[\u0153Text\u0153,\u0153str\u0153,\u0153object\u0153,\u0153Record\u0153],\u0153dataType\u0153:\u0153ChatInput\u0153,\u0153id\u0153:\u0153ChatInput-yxMKE\u0153}-AstraDBSearch-41nRz{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153,\u0153inputTypes\u0153:[\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}" + }, + { + "source": "RecursiveCharacterTextSplitter-tR9QM", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}", + "target": "AstraDB-eUCSS", + "targetHandle": 
"{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "AstraDB-eUCSS", + "inputTypes": null, + "type": "Record" + }, + "sourceHandle": { + "baseClasses": ["Record"], + "dataType": "RecursiveCharacterTextSplitter", + "id": "RecursiveCharacterTextSplitter-tR9QM" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153RecursiveCharacterTextSplitter\u0153,\u0153id\u0153:\u0153RecursiveCharacterTextSplitter-tR9QM\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153inputs\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Record\u0153}", + "selected": false + }, + { + "source": "OpenAIEmbeddings-9TPjc", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}", + "target": "AstraDB-eUCSS", + "targetHandle": "{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDB-eUCSS", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": ["Embeddings"], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-9TPjc" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": 
"reactflow__edge-OpenAIEmbeddings-9TPjc{\u0153baseClasses\u0153:[\u0153Embeddings\u0153],\u0153dataType\u0153:\u0153OpenAIEmbeddings\u0153,\u0153id\u0153:\u0153OpenAIEmbeddings-9TPjc\u0153}-AstraDB-eUCSS{\u0153fieldName\u0153:\u0153embedding\u0153,\u0153id\u0153:\u0153AstraDB-eUCSS\u0153,\u0153inputTypes\u0153:null,\u0153type\u0153:\u0153Embeddings\u0153}", + "selected": false + }, + { + "source": "AstraDBSearch-41nRz", + "sourceHandle": "{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}", + "target": "TextOutput-BDknO", + "targetHandle": "{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "TextOutput-BDknO", + "inputTypes": ["Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Record"], + "dataType": "AstraDBSearch", + "id": "AstraDBSearch-41nRz" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-AstraDBSearch-41nRz{\u0153baseClasses\u0153:[\u0153Record\u0153],\u0153dataType\u0153:\u0153AstraDBSearch\u0153,\u0153id\u0153:\u0153AstraDBSearch-41nRz\u0153}-TextOutput-BDknO{\u0153fieldName\u0153:\u0153input_value\u0153,\u0153id\u0153:\u0153TextOutput-BDknO\u0153,\u0153inputTypes\u0153:[\u0153Record\u0153,\u0153Text\u0153],\u0153type\u0153:\u0153str\u0153}" + } + ], + "viewport": { + "x": -259.6782520315529, + "y": 90.3428735006047, + "zoom": 0.2687057134854984 + } + }, + "description": "Visit https://pre-release.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. 
You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.", + "name": "Vector Store RAG", + "last_tested_version": "1.0.0a0", + "is_component": false } diff --git a/src/backend/base/langflow/interface/agents/__init__.py b/src/backend/base/langflow/interface/agents/__init__.py deleted file mode 100644 index df15bc39b..000000000 --- a/src/backend/base/langflow/interface/agents/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from langflow.interface.agents.base import AgentCreator - -__all__ = ["AgentCreator"] diff --git a/src/backend/base/langflow/interface/agents/base.py b/src/backend/base/langflow/interface/agents/base.py deleted file mode 100644 index ee510580c..000000000 --- a/src/backend/base/langflow/interface/agents/base.py +++ /dev/null @@ -1,62 +0,0 @@ -from typing import ClassVar, Dict, List, Optional - -from langchain.agents import types -from langflow.interface.agents.custom import CUSTOM_AGENTS -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.utils import build_template_from_class -from langflow.legacy_custom.customs import get_custom_nodes -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.agents import AgentFrontendNode -from langflow.utils.util import build_template_from_method -from loguru import logger - - -class AgentCreator(LangChainTypeCreator): - type_name: str = "agents" - - from_method_nodes: ClassVar[Dict] = {"ZeroShotAgent": "from_llm_and_tools"} - - @property - def frontend_node_class(self) -> type[AgentFrontendNode]: - return AgentFrontendNode - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is 
None: - self.type_dict = types.AGENT_TO_CLASS - # Add JsonAgent to the list of agents - for name, agent in CUSTOM_AGENTS.items(): - # TODO: validate AgentType - self.type_dict[name] = agent # type: ignore - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - try: - if name in get_custom_nodes(self.type_name).keys(): - return get_custom_nodes(self.type_name)[name] - elif name in self.from_method_nodes: - return build_template_from_method( - name, - type_to_cls_dict=self.type_to_loader_dict, - add_function=True, - method_name=self.from_method_nodes[name], - ) - return build_template_from_class(name, self.type_to_loader_dict, add_function=True) - except ValueError as exc: - raise ValueError("Agent not found") from exc - except AttributeError as exc: - logger.error(f"Agent {name} not loaded: {exc}") - return None - - # Now this is a generator - def to_list(self) -> List[str]: - names = [] - settings_service = get_settings_service() - for _, agent in self.type_to_loader_dict.items(): - agent_name = agent.function_name() if hasattr(agent, "function_name") else agent.__name__ - if agent_name in settings_service.settings.AGENTS or settings_service.settings.DEV: - names.append(agent_name) - return names - - -agent_creator = AgentCreator() diff --git a/src/backend/base/langflow/interface/agents/custom.py b/src/backend/base/langflow/interface/agents/custom.py deleted file mode 100644 index 36d9bd653..000000000 --- a/src/backend/base/langflow/interface/agents/custom.py +++ /dev/null @@ -1,265 +0,0 @@ -from typing import Any, Optional - -from langchain.agents import AgentExecutor, ZeroShotAgent -from langchain.agents.agent_toolkits import VectorStoreInfo, VectorStoreRouterToolkit, VectorStoreToolkit -from langchain.agents.agent_toolkits.vectorstore.prompt import PREFIX as VECTORSTORE_PREFIX -from langchain.agents.agent_toolkits.vectorstore.prompt import ROUTER_PREFIX as VECTORSTORE_ROUTER_PREFIX -from langchain.agents.mrkl.prompt import 
FORMAT_INSTRUCTIONS -from langchain.chains.llm import LLMChain -from langchain_community.utilities import SQLDatabase -from langchain.tools.sql_database.prompt import QUERY_CHECKER -from langchain_community.agent_toolkits import SQLDatabaseToolkit -from langchain_community.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX -from langchain_community.agent_toolkits.json.toolkit import JsonToolkit -from langchain_community.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX -from langchain_experimental.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX -from langchain_experimental.agents.agent_toolkits.pandas.prompt import SUFFIX_WITH_DF as PANDAS_SUFFIX -from langchain_experimental.tools.python.tool import PythonAstREPLTool - -from langflow.interface.base import CustomAgentExecutor -from langchain_community.tools import ( - InfoSQLDatabaseTool, - ListSQLDatabaseTool, - QuerySQLCheckerTool, - QuerySQLDataBaseTool, -) -from langchain_core.language_models import BaseLanguageModel -from langchain_core.prompts import PromptTemplate - - -class JsonAgent(CustomAgentExecutor): - """Json agent""" - - @staticmethod - def function_name(): - return "JsonAgent" - - @classmethod - def initialize(cls, *args, **kwargs): - return cls.from_toolkit_and_llm(*args, **kwargs) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - @classmethod - def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel): - tools = toolkit if isinstance(toolkit, list) else toolkit.get_tools() - tool_names = list({tool.name for tool in tools}) - prompt = ZeroShotAgent.create_prompt( - tools, - prefix=JSON_PREFIX, - suffix=JSON_SUFFIX, - format_instructions=FORMAT_INSTRUCTIONS, - input_variables=None, - ) - llm_chain = LLMChain( - llm=llm, - prompt=prompt, - ) - agent = ZeroShotAgent( - llm_chain=llm_chain, - allowed_tools=tool_names, # type: ignore - ) - return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True) - - def 
run(self, *args, **kwargs): - return super().run(*args, **kwargs) - - -class CSVAgent(CustomAgentExecutor): - """CSV agent""" - - @staticmethod - def function_name(): - return "CSVAgent" - - @classmethod - def initialize(cls, *args, **kwargs): - return cls.from_toolkit_and_llm(*args, **kwargs) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - @classmethod - def from_toolkit_and_llm( - cls, path: str, llm: BaseLanguageModel, pandas_kwargs: Optional[dict] = None, **kwargs: Any - ): - import pandas as pd # type: ignore - - _kwargs = pandas_kwargs or {} - df = pd.read_csv(path, **_kwargs) - - tools = [PythonAstREPLTool(locals={"df": df})] # type: ignore - prompt = ZeroShotAgent.create_prompt( - tools, - prefix=PANDAS_PREFIX, - suffix=PANDAS_SUFFIX, - input_variables=["df_head", "input", "agent_scratchpad"], - ) - partial_prompt = prompt.partial(df_head=str(df.head())) - llm_chain = LLMChain( - llm=llm, - prompt=partial_prompt, - ) - tool_names = list({tool.name for tool in tools}) - agent = ZeroShotAgent( - llm_chain=llm_chain, - allowed_tools=tool_names, - **kwargs, # type: ignore - ) - - return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True) - - def run(self, *args, **kwargs): - return super().run(*args, **kwargs) - - -class VectorStoreAgent(CustomAgentExecutor): - """Vector store agent""" - - @staticmethod - def function_name(): - return "VectorStoreAgent" - - @classmethod - def initialize(cls, *args, **kwargs): - return cls.from_toolkit_and_llm(*args, **kwargs) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - @classmethod - def from_toolkit_and_llm(cls, llm: BaseLanguageModel, vectorstoreinfo: VectorStoreInfo, **kwargs: Any): - """Construct a vectorstore agent from an LLM and tools.""" - - toolkit = VectorStoreToolkit(vectorstore_info=vectorstoreinfo, llm=llm) - - tools = toolkit.get_tools() - prompt = ZeroShotAgent.create_prompt(tools, prefix=VECTORSTORE_PREFIX) - llm_chain = LLMChain( 
- llm=llm, - prompt=prompt, - ) - tool_names = list({tool.name for tool in tools}) - agent = ZeroShotAgent( - llm_chain=llm_chain, - allowed_tools=tool_names, - **kwargs, # type: ignore - ) - return AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, verbose=True, handle_parsing_errors=True) - - def run(self, *args, **kwargs): - return super().run(*args, **kwargs) - - -class SQLAgent(CustomAgentExecutor): - """SQL agent""" - - @staticmethod - def function_name(): - return "SQLAgent" - - @classmethod - def initialize(cls, *args, **kwargs): - return cls.from_toolkit_and_llm(*args, **kwargs) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - @classmethod - def from_toolkit_and_llm(cls, llm: BaseLanguageModel, database_uri: str, **kwargs: Any): - """Construct an SQL agent from an LLM and tools.""" - db = SQLDatabase.from_uri(database_uri) - toolkit = SQLDatabaseToolkit(db=db, llm=llm) - - llmchain = LLMChain( - llm=llm, - prompt=PromptTemplate(template=QUERY_CHECKER, input_variables=["query", "dialect"]), - ) - - tools = [ - QuerySQLDataBaseTool(db=db), # type: ignore - InfoSQLDatabaseTool(db=db), # type: ignore - ListSQLDatabaseTool(db=db), # type: ignore - QuerySQLCheckerTool(db=db, llm_chain=llmchain, llm=llm), # type: ignore - ] - - prefix = SQL_PREFIX.format(dialect=toolkit.dialect, top_k=10) - prompt = ZeroShotAgent.create_prompt( - tools=tools, # type: ignore - prefix=prefix, - suffix=SQL_SUFFIX, - format_instructions=FORMAT_INSTRUCTIONS, - ) - llm_chain = LLMChain( - llm=llm, - prompt=prompt, - ) - tool_names = list({tool.name for tool in tools}) # type: ignore - agent = ZeroShotAgent( - llm_chain=llm_chain, - allowed_tools=tool_names, - **kwargs, # type: ignore - ) - return AgentExecutor.from_agent_and_tools( - agent=agent, - tools=tools, # type: ignore - verbose=True, - max_iterations=15, - early_stopping_method="force", - handle_parsing_errors=True, - ) - - def run(self, *args, **kwargs): - return super().run(*args, 
**kwargs) - - -class VectorStoreRouterAgent(CustomAgentExecutor): - """Vector Store Router Agent""" - - @staticmethod - def function_name(): - return "VectorStoreRouterAgent" - - @classmethod - def initialize(cls, *args, **kwargs): - return cls.from_toolkit_and_llm(*args, **kwargs) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - @classmethod - def from_toolkit_and_llm( - cls, llm: BaseLanguageModel, vectorstoreroutertoolkit: VectorStoreRouterToolkit, **kwargs: Any - ): - """Construct a vector store router agent from an LLM and tools.""" - - tools = ( - vectorstoreroutertoolkit - if isinstance(vectorstoreroutertoolkit, list) - else vectorstoreroutertoolkit.get_tools() - ) - prompt = ZeroShotAgent.create_prompt(tools, prefix=VECTORSTORE_ROUTER_PREFIX) - llm_chain = LLMChain( - llm=llm, - prompt=prompt, - ) - tool_names = list({tool.name for tool in tools}) - agent = ZeroShotAgent( - llm_chain=llm_chain, - allowed_tools=tool_names, - **kwargs, # type: ignore - ) - return AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, verbose=True, handle_parsing_errors=True) - - def run(self, *args, **kwargs): - return super().run(*args, **kwargs) - - -CUSTOM_AGENTS = { - "JsonAgent": JsonAgent, - "CSVAgent": CSVAgent, - "VectorStoreAgent": VectorStoreAgent, - "VectorStoreRouterAgent": VectorStoreRouterAgent, - "SQLAgent": SQLAgent, -} diff --git a/src/backend/base/langflow/interface/agents/prebuilt.py b/src/backend/base/langflow/interface/agents/prebuilt.py deleted file mode 100644 index 9e59a76e1..000000000 --- a/src/backend/base/langflow/interface/agents/prebuilt.py +++ /dev/null @@ -1,45 +0,0 @@ -from langchain.chains.llm import LLMChain -from langchain.agents import AgentExecutor, ZeroShotAgent -from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX -from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS -from langchain_community.agent_toolkits import JsonToolkit -from langchain_core.language_models 
import BaseLanguageModel - - -class MalfoyAgent(AgentExecutor): - """Json agent""" - - prefix = "Malfoy: " - - @classmethod - def initialize(cls, *args, **kwargs): - return cls.from_toolkit_and_llm(*args, **kwargs) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - @classmethod - def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel): - tools = toolkit.get_tools() - tool_names = {tool.name for tool in tools} - prompt = ZeroShotAgent.create_prompt( - tools, - prefix=JSON_PREFIX, - suffix=JSON_SUFFIX, - format_instructions=FORMAT_INSTRUCTIONS, - input_variables=None, - ) - llm_chain = LLMChain( - llm=llm, - prompt=prompt, - ) - agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names) # type: ignore - return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True) - - def run(self, *args, **kwargs): - return super().run(*args, **kwargs) - - -PREBUILT_AGENTS = { - "MalfoyAgent": MalfoyAgent, -} diff --git a/src/backend/base/langflow/interface/base.py b/src/backend/base/langflow/interface/base.py deleted file mode 100644 index a300f12f2..000000000 --- a/src/backend/base/langflow/interface/base.py +++ /dev/null @@ -1,139 +0,0 @@ -from abc import ABC, abstractmethod -from typing import Any, Dict, List, Optional, Type, Union - -from langchain.agents import AgentExecutor -from langchain.chains.base import Chain -from loguru import logger -from pydantic import BaseModel - -from langflow.services.deps import get_settings_service -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode -from langflow.template.template.base import Template - -# Assuming necessary imports for Field, Template, and FrontendNode classes - - -class LangChainTypeCreator(BaseModel, ABC): - type_name: str - type_dict: Optional[Dict] = None - name_docs_dict: Optional[Dict[str, str]] = None - - @property - def frontend_node_class(self) -> Type[FrontendNode]: - """The class 
type of the FrontendNode created in frontend_node.""" - return FrontendNode - - @property - def docs_map(self) -> Dict[str, str]: - """A dict with the name of the component as key and the documentation link as value.""" - settings_service = get_settings_service() - if self.name_docs_dict is None: - try: - type_settings = getattr(settings_service.settings, self.type_name.upper()) - self.name_docs_dict = {name: value_dict["documentation"] for name, value_dict in type_settings.items()} - except AttributeError as exc: - logger.error(f"Error getting settings for {self.type_name}: {exc}") - - self.name_docs_dict = {} - return self.name_docs_dict - - @property - @abstractmethod - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - raise NotImplementedError - return self.type_dict - - @abstractmethod - def get_signature(self, name: str) -> Union[Optional[Dict[Any, Any]], FrontendNode]: - pass - - @abstractmethod - def to_list(self) -> List[str]: - pass - - def to_dict(self) -> Dict: - result: Dict = {self.type_name: {}} - - for name in self.to_list(): - # frontend_node.to_dict() returns a dict with the following structure: - # {name: {template: {fields}, description: str}} - # so we should update the result dict - node = self.frontend_node(name) - if node is not None: - node = node.to_dict() # type: ignore - result[self.type_name].update(node) - - return result - - def frontend_node(self, name) -> Union[FrontendNode, None]: - signature = self.get_signature(name) - if signature is None: - logger.error(f"Node {name} not loaded") - return signature - if not isinstance(signature, FrontendNode): - fields = [ - TemplateField( - name=key, - field_type=value["type"], - required=value.get("required", False), - placeholder=value.get("placeholder", ""), - is_list=value.get("list", False), - show=value.get("show", True), - multiline=value.get("multiline", False), - value=value.get("value", None), - file_types=value.get("fileTypes", []), - 
file_path=value.get("file_path", None), - ) - for key, value in signature["template"].items() - if key != "_type" - ] - template = Template(type_name=name, fields=fields) - signature = self.frontend_node_class( - template=template, - description=signature.get("description", ""), - base_classes=signature["base_classes"], - name=name, - ) - - signature.add_extra_fields() - signature.add_extra_base_classes() - signature.set_documentation(self.docs_map.get(name, "")) - return signature - - -class CustomChain(Chain, ABC): - """Custom chain""" - - @staticmethod - def function_name(): - return "CustomChain" - - @classmethod - def initialize(cls, *args, **kwargs): - pass - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self, *args, **kwargs): - return super().run(*args, **kwargs) - - -class CustomAgentExecutor(AgentExecutor, ABC): - """Custom chain""" - - @staticmethod - def function_name(): - return "CustomChain" - - @classmethod - def initialize(cls, *args, **kwargs): - pass - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self, *args, **kwargs): - return super().run(*args, **kwargs) diff --git a/src/backend/base/langflow/interface/chains/__init__.py b/src/backend/base/langflow/interface/chains/__init__.py deleted file mode 100644 index 2e5570b3c..000000000 --- a/src/backend/base/langflow/interface/chains/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from langflow.interface.chains.base import ChainCreator - -__all__ = ["ChainCreator"] diff --git a/src/backend/base/langflow/interface/chains/base.py b/src/backend/base/langflow/interface/chains/base.py deleted file mode 100644 index e69b93614..000000000 --- a/src/backend/base/langflow/interface/chains/base.py +++ /dev/null @@ -1,77 +0,0 @@ -from typing import Any, ClassVar, Dict, List, Optional, Type - -from langchain import chains -from langchain_experimental.sql import SQLDatabaseChain -from langflow.interface.base import LangChainTypeCreator 
-from langflow.interface.importing.utils import import_class -from langflow.interface.utils import build_template_from_class -from langflow.legacy_custom.customs import get_custom_nodes -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.chains import ChainFrontendNode -from langflow.utils.util import build_template_from_method -from loguru import logger - -# Assuming necessary imports for Field, Template, and FrontendNode classes - - -class ChainCreator(LangChainTypeCreator): - type_name: str = "chains" - - @property - def frontend_node_class(self) -> Type[ChainFrontendNode]: - return ChainFrontendNode - - #! We need to find a better solution for this - from_method_nodes: ClassVar[Dict] = { - "ConversationalRetrievalChain": "from_llm", - "LLMCheckerChain": "from_llm", - "SQLDatabaseChain": "from_llm", - } - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - settings_service = get_settings_service() - self.type_dict: dict[str, Any] = { - chain_name: import_class(f"langchain.chains.{chain_name}") for chain_name in chains.__all__ - } - from langflow.interface.chains.custom import CUSTOM_CHAINS - - self.type_dict["SQLDatabaseChain"] = SQLDatabaseChain - - self.type_dict.update(CUSTOM_CHAINS) - # Filter according to settings.chains - self.type_dict = { - name: chain - for name, chain in self.type_dict.items() - if name in settings_service.settings.CHAINS or settings_service.settings.DEV - } - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - try: - if name in get_custom_nodes(self.type_name).keys(): - return get_custom_nodes(self.type_name)[name] - elif name in self.from_method_nodes.keys(): - return build_template_from_method( - name, - type_to_cls_dict=self.type_to_loader_dict, - method_name=self.from_method_nodes[name], - add_function=True, - ) - return build_template_from_class(name, self.type_to_loader_dict, add_function=True) - except ValueError as exc: - 
raise ValueError(f"Chain {name} not found: {exc}") from exc - except AttributeError as exc: - logger.error(f"Chain {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - names = [] - for _, chain in self.type_to_loader_dict.items(): - chain_name = chain.function_name() if hasattr(chain, "function_name") else chain.__name__ - names.append(chain_name) - return names - - -chain_creator = ChainCreator() diff --git a/src/backend/base/langflow/interface/chains/custom.py b/src/backend/base/langflow/interface/chains/custom.py deleted file mode 100644 index af5a84c54..000000000 --- a/src/backend/base/langflow/interface/chains/custom.py +++ /dev/null @@ -1,118 +0,0 @@ -from typing import Dict, Optional, Type, Union -from langchain.chains import ConversationChain -from langchain.chains.question_answering import load_qa_chain -from langchain.memory.buffer import ConversationBufferMemory -from pydantic.v1 import Field, root_validator - -from langflow.interface.base import CustomChain -from langflow.interface.utils import extract_input_variables_from_prompt -from langchain_core.language_models import BaseLanguageModel -from langchain_core.memory import BaseMemory - -DEFAULT_SUFFIX = """" -Current conversation: -{history} -Human: {input} -{ai_prefix}""" - - -class BaseCustomConversationChain(ConversationChain): - """BaseCustomChain is a chain you can use to have a conversation with a custom character.""" - - template: Optional[str] - - ai_prefix_value: Optional[str] - """Field to use as the ai_prefix. 
It needs to be set and has to be in the template""" - - @root_validator(pre=False) - def build_template(cls, values): - format_dict = {} - input_variables = extract_input_variables_from_prompt(values["template"]) - - if values.get("ai_prefix_value", None) is None: - values["ai_prefix_value"] = values["memory"].ai_prefix - - for key in input_variables: - new_value = values.get(key, f"{{{key}}}") - format_dict[key] = new_value - if key == values.get("ai_prefix_value", None): - values["memory"].ai_prefix = new_value - - values["template"] = values["template"].format(**format_dict) - - values["template"] = values["template"] - values["input_variables"] = extract_input_variables_from_prompt(values["template"]) - values["prompt"].template = values["template"] - values["prompt"].input_variables = values["input_variables"] - return values - - -class SeriesCharacterChain(BaseCustomConversationChain): - """SeriesCharacterChain is a chain you can use to have a conversation with a character from a series.""" - - character: str - series: str - template: Optional[str] = """I want you to act like {character} from {series}. -I want you to respond and answer like {character}. do not write any explanations. only answer like {character}. -You must know all of the knowledge of {character}. -Current conversation: -{history} -Human: {input} -{character}:""" - memory: BaseMemory = Field(default_factory=ConversationBufferMemory) - ai_prefix_value: Optional[str] = "character" - """Default memory store.""" - - -class MidJourneyPromptChain(BaseCustomConversationChain): - """MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts.""" - - template: Optional[ - str - ] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program. - Your job is to provide detailed and creative descriptions that will inspire unique and interesting images from the AI. 
- Keep in mind that the AI is capable of understanding a wide range of language and can interpret abstract concepts, so feel free to be as imaginative and descriptive as possible. - For example, you could describe a scene from a futuristic city, or a surreal landscape filled with strange creatures. - The more detailed and imaginative your description, the more interesting the resulting image will be. Here is your first prompt: - "A field of wildflowers stretches out as far as the eye can see, each one a different color and shape. In the distance, a massive tree towers over the landscape, its branches reaching up to the sky like tentacles.\" - - Current conversation: - {history} - Human: {input} - AI:""" # noqa: E501 - - -class TimeTravelGuideChain(BaseCustomConversationChain): - template: Optional[ - str - ] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information. 
- Current conversation: - {history} - Human: {input} - AI:""" # noqa: E501 - - -class CombineDocsChain(CustomChain): - """Implementation of load_qa_chain function""" - - @staticmethod - def function_name(): - return "load_qa_chain" - - @classmethod - def initialize(cls, llm: BaseLanguageModel, chain_type: str): - return load_qa_chain(llm=llm, chain_type=chain_type) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def run(self, *args, **kwargs): - return super().run(*args, **kwargs) - - -CUSTOM_CHAINS: Dict[str, Type[Union[ConversationChain, CustomChain]]] = { - "CombineDocsChain": CombineDocsChain, - "SeriesCharacterChain": SeriesCharacterChain, - "MidJourneyPromptChain": MidJourneyPromptChain, - "TimeTravelGuideChain": TimeTravelGuideChain, -} diff --git a/src/backend/base/langflow/interface/custom/__init__.py b/src/backend/base/langflow/interface/custom/__init__.py deleted file mode 100644 index 5b87e9fa3..000000000 --- a/src/backend/base/langflow/interface/custom/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from langflow.interface.custom.base import CustomComponentCreator -from langflow.interface.custom.custom_component import CustomComponent - -__all__ = ["CustomComponentCreator", "CustomComponent"] diff --git a/src/backend/base/langflow/interface/custom/base.py b/src/backend/base/langflow/interface/custom/base.py deleted file mode 100644 index 573eacba1..000000000 --- a/src/backend/base/langflow/interface/custom/base.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import Any, Dict, List, Optional, Type - -from loguru import logger - -from langflow.interface.base import LangChainTypeCreator - -# from langflow.interface.custom.custom import CustomComponent -from langflow.interface.custom.custom_component import CustomComponent -from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode - - -class CustomComponentCreator(LangChainTypeCreator): - type_name: str = "custom_components" - - @property - def 
frontend_node_class(self) -> Type[CustomComponentFrontendNode]: - return CustomComponentFrontendNode - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - self.type_dict: dict[str, Any] = { - "CustomComponent": CustomComponent, - } - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - from langflow.legacy_custom.customs import get_custom_nodes - - try: - if name in get_custom_nodes(self.type_name).keys(): - return get_custom_nodes(self.type_name)[name] - except ValueError as exc: - raise ValueError(f"CustomComponent {name} not found: {exc}") from exc - except AttributeError as exc: - logger.error(f"CustomComponent {name} not loaded: {exc}") - return None - return None - - def to_list(self) -> List[str]: - return list(self.type_to_loader_dict.keys()) - - -custom_component_creator = CustomComponentCreator() diff --git a/src/backend/base/langflow/interface/custom_lists.py b/src/backend/base/langflow/interface/custom_lists.py deleted file mode 100644 index 27429e605..000000000 --- a/src/backend/base/langflow/interface/custom_lists.py +++ /dev/null @@ -1,66 +0,0 @@ -import inspect -from typing import Any - -from langchain import llms, memory, text_splitter -from langchain_community import agent_toolkits, document_loaders, embeddings -from langchain_community.chat_models import ChatVertexAI - -from langflow.interface.agents.custom import CUSTOM_AGENTS -from langflow.interface.chains.custom import CUSTOM_CHAINS -from langflow.interface.importing.utils import import_class -from langchain_anthropic import ChatAnthropic -from langchain_openai import AzureChatOpenAI, ChatOpenAI - -# LLMs -llm_type_to_cls_dict = {} - -for k, v in llms.get_type_to_cls_dict().items(): - try: - llm_type_to_cls_dict[k] = v() - except Exception: - pass -llm_type_to_cls_dict["anthropic-chat"] = ChatAnthropic # type: ignore -llm_type_to_cls_dict["azure-chat"] = AzureChatOpenAI # type: ignore -llm_type_to_cls_dict["openai-chat"] = 
ChatOpenAI # type: ignore -llm_type_to_cls_dict["vertexai-chat"] = ChatVertexAI # type: ignore - - -# Toolkits -toolkit_type_to_loader_dict: dict[str, Any] = { - toolkit_name: import_class(f"langchain_community.agent_toolkits.{toolkit_name}") - # if toolkit_name is lower case it is a loader - for toolkit_name in agent_toolkits.__all__ - if toolkit_name.islower() -} - -toolkit_type_to_cls_dict: dict[str, Any] = { - toolkit_name: import_class(f"langchain_community.agent_toolkits.{toolkit_name}") - # if toolkit_name is not lower case it is a class - for toolkit_name in agent_toolkits.__all__ - if not toolkit_name.islower() -} - -# Memories -memory_type_to_cls_dict: dict[str, Any] = { - memory_name: import_class(f"langchain.memory.{memory_name}") for memory_name in memory.__all__ -} - - -# Embeddings -embedding_type_to_cls_dict: dict[str, Any] = { - embedding_name: import_class(f"langchain_community.embeddings.{embedding_name}") - for embedding_name in embeddings.__all__ -} - - -# Document Loaders -documentloaders_type_to_cls_dict: dict[str, Any] = { - documentloader_name: import_class(f"langchain_community.document_loaders.{documentloader_name}") - for documentloader_name in document_loaders.__all__ -} - -# Text Splitters -textsplitter_type_to_cls_dict: dict[str, Any] = dict(inspect.getmembers(text_splitter, inspect.isclass)) - -# merge CUSTOM_AGENTS and CUSTOM_CHAINS -CUSTOM_NODES = {**CUSTOM_AGENTS, **CUSTOM_CHAINS} # type: ignore diff --git a/src/backend/base/langflow/interface/document_loaders/base.py b/src/backend/base/langflow/interface/document_loaders/base.py deleted file mode 100644 index 11bf0db42..000000000 --- a/src/backend/base/langflow/interface/document_loaders/base.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import Dict, List, Optional, Type - -from loguru import logger - -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.custom_lists import documentloaders_type_to_cls_dict -from langflow.interface.utils import 
build_template_from_class -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode - - -class DocumentLoaderCreator(LangChainTypeCreator): - type_name: str = "documentloaders" - - @property - def frontend_node_class(self) -> Type[DocumentLoaderFrontNode]: - return DocumentLoaderFrontNode - - @property - def type_to_loader_dict(self) -> Dict: - return documentloaders_type_to_cls_dict - - def get_signature(self, name: str) -> Optional[Dict]: - """Get the signature of a document loader.""" - try: - return build_template_from_class(name, documentloaders_type_to_cls_dict) - except ValueError as exc: - raise ValueError(f"Documment Loader {name} not found") from exc - except AttributeError as exc: - logger.error(f"Documment Loader {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - settings_service = get_settings_service() - return [ - documentloader.__name__ - for documentloader in self.type_to_loader_dict.values() - if documentloader.__name__ in settings_service.settings.DOCUMENTLOADERS or settings_service.settings.DEV - ] - - -documentloader_creator = DocumentLoaderCreator() diff --git a/src/backend/base/langflow/interface/embeddings/base.py b/src/backend/base/langflow/interface/embeddings/base.py deleted file mode 100644 index 5fd7ad3b0..000000000 --- a/src/backend/base/langflow/interface/embeddings/base.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import Dict, List, Optional, Type - -from loguru import logger - -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.custom_lists import embedding_type_to_cls_dict -from langflow.interface.utils import build_template_from_class -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.base import FrontendNode -from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode - - -class EmbeddingCreator(LangChainTypeCreator): - 
type_name: str = "embeddings" - - @property - def type_to_loader_dict(self) -> Dict: - return embedding_type_to_cls_dict - - @property - def frontend_node_class(self) -> Type[FrontendNode]: - return EmbeddingFrontendNode - - def get_signature(self, name: str) -> Optional[Dict]: - """Get the signature of an embedding.""" - try: - return build_template_from_class(name, embedding_type_to_cls_dict) - except ValueError as exc: - raise ValueError(f"Embedding {name} not found") from exc - - except AttributeError as exc: - logger.error(f"Embedding {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - settings_service = get_settings_service() - return [ - embedding.__name__ - for embedding in self.type_to_loader_dict.values() - if embedding.__name__ in settings_service.settings.EMBEDDINGS or settings_service.settings.DEV - ] - - -embedding_creator = EmbeddingCreator() diff --git a/src/backend/base/langflow/interface/importing/utils.py b/src/backend/base/langflow/interface/importing/utils.py index a4f4904ac..963a51ccb 100644 --- a/src/backend/base/langflow/interface/importing/utils.py +++ b/src/backend/base/langflow/interface/importing/utils.py @@ -1,16 +1,7 @@ # This module is used to import any langchain class by name. import importlib -from typing import Any, Type - -from langchain.agents import Agent -from langchain.chains.base import Chain -from langchain_core.language_models.chat_models import BaseChatModel - -from langflow.interface.wrappers.base import wrapper_creator -from langchain_core.language_models import BaseLanguageModel -from langchain_core.prompts import PromptTemplate -from langchain_core.tools import BaseTool +from typing import Any def import_module(module_path: str) -> Any: @@ -27,134 +18,8 @@ def import_module(module_path: str) -> Any: return getattr(module, object_name) -def import_by_type(_type: str, name: str) -> Any: - """Import class by type and name""" - if _type is None: - raise ValueError(f"Type cannot be None. 
Check if {name} is in the config file.") - func_dict = { - "agents": import_agent, - "prompts": import_prompt, - "models": {"llm": import_llm, "chat": import_chat_llm}, - "tools": import_tool, - "chains": import_chain, - "toolkits": import_toolkit, - "wrappers": import_wrapper, - "memory": import_memory, - "embeddings": import_embedding, - "vectorstores": import_vectorstore, - "documentloaders": import_documentloader, - "textsplitters": import_textsplitter, - "utilities": import_utility, - "retrievers": import_retriever, - } - if _type == "models": - key = "chat" if "chat" in name.lower() else "llm" - loaded_func = func_dict[_type][key] # type: ignore - else: - loaded_func = func_dict[_type] - - return loaded_func(name) - - -def import_chat_llm(llm: str) -> BaseChatModel: - """Import chat llm from llm name""" - return import_class(f"langchain_community.chat_models.{llm}") - - -def import_retriever(retriever: str) -> Any: - """Import retriever from retriever name""" - return import_module(f"from langchain.retrievers import {retriever}") - - -def import_memory(memory: str) -> Any: - """Import memory from memory name""" - return import_module(f"from langchain.memory import {memory}") - - def import_class(class_path: str) -> Any: """Import class from class path""" module_path, class_name = class_path.rsplit(".", 1) module = import_module(module_path) return getattr(module, class_name) - - -def import_prompt(prompt: str) -> Type[PromptTemplate]: - """Import prompt from prompt name""" - from langflow.interface.prompts.custom import CUSTOM_PROMPTS - - if prompt == "ZeroShotPrompt": - return import_class("langchain.prompts.PromptTemplate") - elif prompt in CUSTOM_PROMPTS: - return CUSTOM_PROMPTS[prompt] - return import_class(f"langchain.prompts.{prompt}") - - -def import_wrapper(wrapper: str) -> Any: - """Import wrapper from wrapper name""" - if isinstance(wrapper_creator.type_dict, dict) and wrapper in wrapper_creator.type_dict: - return 
wrapper_creator.type_dict.get(wrapper) - - -def import_toolkit(toolkit: str) -> Any: - """Import toolkit from toolkit name""" - return import_module(f"from langchain.agents.agent_toolkits import {toolkit}") - - -def import_agent(agent: str) -> Agent: - """Import agent from agent name""" - # check for custom agent - - return import_class(f"langchain.agents.{agent}") - - -def import_llm(llm: str) -> BaseLanguageModel: - """Import llm from llm name""" - return import_class(f"langchain.llms.{llm}") - - -def import_tool(tool: str) -> BaseTool: - """Import tool from tool name""" - from langflow.interface.tools.base import tool_creator - - if tool in tool_creator.type_to_loader_dict: - return tool_creator.type_to_loader_dict[tool]["fcn"] - - return import_class(f"langchain.tools.{tool}") - - -def import_chain(chain: str) -> Type[Chain]: - """Import chain from chain name""" - from langflow.interface.chains.custom import CUSTOM_CHAINS - - if chain in CUSTOM_CHAINS: - return CUSTOM_CHAINS[chain] - if chain == "SQLDatabaseChain": - return import_class("langchain_experimental.sql.SQLDatabaseChain") - return import_class(f"langchain.chains.{chain}") - - -def import_embedding(embedding: str) -> Any: - """Import embedding from embedding name""" - return import_class(f"langchain_community.embeddings.{embedding}") - - -def import_vectorstore(vectorstore: str) -> Any: - """Import vectorstore from vectorstore name""" - return import_class(f"langchain_community.vectorstores.{vectorstore}") - - -def import_documentloader(documentloader: str) -> Any: - """Import documentloader from documentloader name""" - return import_class(f"langchain_community.document_loaders.{documentloader}") - - -def import_textsplitter(textsplitter: str) -> Any: - """Import textsplitter from textsplitter name""" - return import_class(f"langchain.text_splitter.{textsplitter}") - - -def import_utility(utility: str) -> Any: - """Import utility from utility name""" - if utility == "SQLDatabase": - return 
import_class(f"langchain_community.sql_database.{utility}") - return import_class(f"langchain_community.utilities.{utility}") diff --git a/src/backend/base/langflow/interface/initialize/loading.py b/src/backend/base/langflow/interface/initialize/loading.py index d5ebf7260..03de827b3 100644 --- a/src/backend/base/langflow/interface/initialize/loading.py +++ b/src/backend/base/langflow/interface/initialize/loading.py @@ -1,33 +1,13 @@ import inspect import json import os -from typing import TYPE_CHECKING, Any, Callable, Dict, Sequence, Type +from typing import TYPE_CHECKING, Any, Type import orjson -from langchain.agents import agent as agent_module -from langchain.agents.agent import AgentExecutor -from langchain.chains.base import Chain -from langchain_core.documents import Document from loguru import logger -from pydantic import ValidationError -from langflow.interface.custom.eval import eval_custom_component_code -from langflow.interface.importing.utils import import_by_type -from langflow.interface.initialize.llm import initialize_vertexai -from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type, handle_partial_variables -from langflow.interface.initialize.vector_store import vecstore_initializer -from langflow.interface.retrievers.base import retriever_creator -from langflow.interface.toolkits.base import toolkits_creator -from langflow.interface.utils import load_file_into_dict -from langflow.interface.wrappers.base import wrapper_creator +from langflow.custom.eval import eval_custom_component_code from langflow.schema.schema import Record -from langflow.utils import validate -from langflow.utils.util import unescape_string -from langchain_community.agent_toolkits.base import BaseToolkit -from langchain_core.document_loaders import BaseLoader -from langchain_core.tools import BaseTool -from langchain_core.vectorstores import VectorStore -from langchain_text_splitters import Language if TYPE_CHECKING: from langflow.custom import 
CustomComponent @@ -40,36 +20,19 @@ async def instantiate_class( user_id=None, ) -> Any: """Instantiate class from module type and key, and params""" - from langflow.interface.custom_lists import CUSTOM_NODES vertex_type = vertex.vertex_type base_type = vertex.base_type params = vertex.params params = convert_params_to_sets(params) params = convert_kwargs(params) - - if vertex_type in CUSTOM_NODES: - if custom_node := CUSTOM_NODES.get(vertex_type): - if hasattr(custom_node, "initialize"): - return custom_node.initialize(**params) - if callable(custom_node): - return custom_node(**params) - raise ValueError(f"Custom node {vertex_type} is not callable") logger.debug(f"Instantiating {vertex_type} of type {base_type}") if not base_type: raise ValueError("No base type provided for vertex") if base_type == "custom_components": return await instantiate_custom_component(params, user_id, vertex, fallback_to_env_vars=fallback_to_env_vars) - class_object = import_by_type(_type=base_type, name=vertex_type) - return await instantiate_based_on_type( - class_object=class_object, - base_type=base_type, - node_type=vertex_type, - params=params, - user_id=user_id, - vertex=vertex, - fallback_to_env_vars=fallback_to_env_vars, - ) + else: + raise ValueError(f"Base type {base_type} not found.") def convert_params_to_sets(params): @@ -96,45 +59,6 @@ def convert_kwargs(params): return params -async def instantiate_based_on_type(class_object, base_type, node_type, params, user_id, vertex, fallback_to_env_vars): - if base_type == "agents": - return instantiate_agent(node_type, class_object, params) - elif base_type == "prompts": - return instantiate_prompt(node_type, class_object, params) - elif base_type == "tools": - tool = instantiate_tool(node_type, class_object, params) - if hasattr(tool, "name") and isinstance(tool, BaseTool): - # tool name shouldn't contain spaces - tool.name = tool.name.replace(" ", "_") - return tool - elif base_type == "toolkits": - return 
instantiate_toolkit(node_type, class_object, params) - elif base_type == "embeddings": - return instantiate_embedding(node_type, class_object, params) - elif base_type == "vectorstores": - return instantiate_vectorstore(class_object, params) - elif base_type == "documentloaders": - return instantiate_documentloader(node_type, class_object, params) - elif base_type == "textsplitters": - return instantiate_textsplitter(class_object, params) - elif base_type == "utilities": - return instantiate_utility(node_type, class_object, params) - elif base_type == "chains": - return instantiate_chains(node_type, class_object, params) - elif base_type == "models": - return instantiate_llm(node_type, class_object, params) - elif base_type == "retrievers": - return instantiate_retriever(node_type, class_object, params) - elif base_type == "memory": - return instantiate_memory(node_type, class_object, params) - elif base_type == "custom_components": - return await instantiate_custom_component(params, user_id, vertex, fallback_to_env_vars=fallback_to_env_vars) - elif base_type == "wrappers": - return instantiate_wrapper(node_type, class_object, params) - else: - return class_object(**params) - - def update_params_with_load_from_db_fields( custom_component: "CustomComponent", params, load_from_db_fields, fallback_to_env_vars=False ): @@ -201,349 +125,3 @@ async def instantiate_custom_component(params, user_id, vertex, fallback_to_env_ if not isinstance(custom_repr, str): custom_repr = str(custom_repr) return custom_component, build_result, {"repr": custom_repr} - - -def instantiate_wrapper(node_type, class_object, params): - if node_type in wrapper_creator.from_method_nodes: - method = wrapper_creator.from_method_nodes[node_type] - if class_method := getattr(class_object, method, None): - return class_method(**params) - raise ValueError(f"Method {method} not found in {class_object}") - return class_object(**params) - - -def instantiate_llm(node_type, class_object, params: Dict): - # 
This is a workaround so JinaChat works until streaming is implemented - # if "openai_api_base" in params and "jina" in params["openai_api_base"]: - # False if condition is True - if "VertexAI" in node_type: - return initialize_vertexai(class_object=class_object, params=params) - # max_tokens sometimes is a string and should be an int - if "max_tokens" in params: - if isinstance(params["max_tokens"], str) and params["max_tokens"].isdigit(): - params["max_tokens"] = int(params["max_tokens"]) - elif not isinstance(params.get("max_tokens"), int): - params.pop("max_tokens", None) - return class_object(**params) - - -def instantiate_memory(node_type, class_object, params): - # process input_key and output_key to remove them if - # they are empty strings - if node_type == "ConversationEntityMemory": - params.pop("memory_key", None) - - for key in ["input_key", "output_key"]: - if key in params and (params[key] == "" or not params[key]): - params.pop(key) - - try: - if "retriever" in params and hasattr(params["retriever"], "as_retriever"): - params["retriever"] = params["retriever"].as_retriever() - return class_object(**params) - # I want to catch a specific attribute error that happens - # when the object does not have a cursor attribute - except Exception as exc: - if "object has no attribute 'cursor'" in str(exc) or 'object has no field "conn"' in str(exc): - raise AttributeError( - ( - "Failed to build connection to database." - f" Please check your connection string and try again. 
Error: {exc}" - ) - ) from exc - raise exc - - -def instantiate_retriever(node_type, class_object, params): - if "retriever" in params and hasattr(params["retriever"], "as_retriever"): - params["retriever"] = params["retriever"].as_retriever() - if node_type in retriever_creator.from_method_nodes: - method = retriever_creator.from_method_nodes[node_type] - if class_method := getattr(class_object, method, None): - return class_method(**params) - raise ValueError(f"Method {method} not found in {class_object}") - return class_object(**params) - - -def instantiate_chains(node_type, class_object: Type[Chain], params: Dict): - from langflow.interface.chains.base import chain_creator - - if "retriever" in params and hasattr(params["retriever"], "as_retriever"): - params["retriever"] = params["retriever"].as_retriever() - if node_type in chain_creator.from_method_nodes: - method = chain_creator.from_method_nodes[node_type] - if class_method := getattr(class_object, method, None): - return class_method(**params) - raise ValueError(f"Method {method} not found in {class_object}") - - return class_object(**params) - - -def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params: Dict): - from langflow.interface.agents.base import agent_creator - - if node_type in agent_creator.from_method_nodes: - method = agent_creator.from_method_nodes[node_type] - if class_method := getattr(class_object, method, None): - agent = class_method(**params) - tools = params.get("tools", []) - return AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, handle_parsing_errors=True) - return load_agent_executor(class_object, params) - - -def instantiate_prompt(node_type, class_object, params: Dict): - params, prompt = handle_node_type(node_type, class_object, params) - format_kwargs = handle_format_kwargs(prompt, params) - # Now we'll use partial_format to format the prompt - if format_kwargs: - prompt = handle_partial_variables(prompt, format_kwargs) - return prompt, 
format_kwargs - - -def instantiate_tool(node_type, class_object: Type[BaseTool], params: Dict): - if node_type == "JsonSpec": - if file_dict := load_file_into_dict(params.pop("path")): - params["dict_"] = file_dict - else: - raise ValueError("Invalid file") - return class_object(**params) - elif node_type == "PythonFunctionTool": - from langflow.interface.custom.utils import get_function - - params["func"] = get_function(params.get("code")) - return class_object(**params) - elif node_type == "PythonFunction": - function_string = params["code"] - if isinstance(function_string, str): - return validate.eval_function(function_string) - raise ValueError("Function should be a string") - elif node_type.lower() == "tool": - return class_object(**params) - return class_object(**params) - - -def instantiate_toolkit(node_type, class_object: Type[BaseToolkit], params: Dict): - loaded_toolkit = class_object(**params) - # Commenting this out for now to use toolkits as normal tools - # if toolkits_creator.has_create_function(node_type): - # return load_toolkits_executor(node_type, loaded_toolkit, params) - if isinstance(loaded_toolkit, BaseToolkit): - return loaded_toolkit.get_tools() - return loaded_toolkit - - -def instantiate_embedding(node_type, class_object, params: Dict): - params.pop("model", None) - params.pop("headers", None) - - if "VertexAI" in node_type: - return initialize_vertexai(class_object=class_object, params=params) - - if "OpenAIEmbedding" in node_type: - params["disallowed_special"] = () - - try: - return class_object(**params) - except ValidationError: - params = {key: value for key, value in params.items() if key in class_object.model_fields} - return class_object(**params) - - -def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict): - search_kwargs = params.pop("search_kwargs", {}) - if search_kwargs == {"yourkey": "value"}: - search_kwargs = {} - # clean up docs or texts to have only documents - if "texts" in params: - 
params["documents"] = params.pop("texts") - if "documents" in params: - params["documents"] = [doc for doc in params["documents"] if isinstance(doc, Document)] - if initializer := vecstore_initializer.get(class_object.__name__): - vecstore = initializer(class_object, params) - else: - if "texts" in params: - params["documents"] = params.pop("texts") - vecstore = class_object.from_documents(**params) - - # ! This might not work. Need to test - if search_kwargs and hasattr(vecstore, "as_retriever"): - vecstore = vecstore.as_retriever(search_kwargs=search_kwargs) - - return vecstore - - -def instantiate_documentloader(node_type: str, class_object: Type[BaseLoader], params: Dict): - if "file_filter" in params: - # file_filter will be a string but we need a function - # that will be used to filter the files using file_filter - # like lambda x: x.endswith(".txt") but as we don't know - # anything besides the string, we will simply check if the string is - # in x and if it is, we will return True - file_filter = params.pop("file_filter") - extensions = file_filter.split(",") - params["file_filter"] = lambda x: any(extension.strip() in x for extension in extensions) - metadata = params.pop("metadata", None) - if metadata and isinstance(metadata, str): - try: - metadata = orjson.loads(metadata) - except json.JSONDecodeError as exc: - raise ValueError("The metadata you provided is not a valid JSON string.") from exc - - if node_type == "WebBaseLoader": - if web_path := params.pop("web_path", None): - params["web_paths"] = [web_path] - - docs = class_object(**params).load() - # Now if metadata is an empty dict, we will not add it to the documents - if metadata: - for doc in docs: - # If the document already has metadata, we will not overwrite it - if not doc.metadata: - doc.metadata = metadata - else: - doc.metadata.update(metadata) - - return docs - - -def instantiate_textsplitter( - class_object, - params: Dict, -): - try: - documents = params.pop("documents") - if not 
isinstance(documents, list): - documents = [documents] - except KeyError as exc: - raise ValueError( - "The source you provided did not load correctly or was empty." - "Try changing the chunk_size of the Text Splitter." - ) from exc - - if ("separator_type" in params and params["separator_type"] == "Text") or "separator_type" not in params: - params.pop("separator_type", None) - # separators might come in as an escaped string like \\n - # so we need to convert it to a string - if "separators" in params: - if isinstance(params["separators"], str): - params["separators"] = unescape_string(params["separators"]) - elif isinstance(params["separators"], list): - params["separators"] = [unescape_string(separator) for separator in params["separators"]] - text_splitter = class_object(**params) - else: - language = params.pop("separator_type", None) - params["language"] = Language(language) - params.pop("separators", None) - - text_splitter = class_object.from_language(**params) - return text_splitter.split_documents(documents) - - -def instantiate_utility(node_type, class_object, params: Dict): - if node_type == "SQLDatabase": - return class_object.from_uri(params.pop("uri")) - return class_object(**params) - - -def replace_zero_shot_prompt_with_prompt_template(nodes): - """Replace ZeroShotPrompt with PromptTemplate""" - for node in nodes: - if node["data"]["type"] == "ZeroShotPrompt": - # Build Prompt Template - tools = [ - tool - for tool in nodes - if tool["type"] != "chatOutputNode" and "Tool" in tool["data"]["node"]["base_classes"] - ] - node["data"] = build_prompt_template(prompt=node["data"], tools=tools) - break - return nodes - - -def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs): - """Load agent executor from agent class, tools and chain""" - allowed_tools: Sequence[BaseTool] = params.get("allowed_tools", []) - llm_chain = params["llm_chain"] - # agent has hidden args for memory. 
might need to be support - # memory = params["memory"] - # if allowed_tools is not a list or set, make it a list - if not isinstance(allowed_tools, (list, set)) and isinstance(allowed_tools, BaseTool): - allowed_tools = [allowed_tools] - tool_names = [tool.name for tool in allowed_tools] - # Agent class requires an output_parser but Agent classes - # have a default output_parser. - agent = agent_class(allowed_tools=tool_names, llm_chain=llm_chain) # type: ignore - return AgentExecutor.from_agent_and_tools( - agent=agent, - tools=allowed_tools, - handle_parsing_errors=True, - # memory=memory, - **kwargs, - ) - - -def load_toolkits_executor(node_type: str, toolkit: BaseToolkit, params: dict): - create_function: Callable = toolkits_creator.get_create_function(node_type) - if llm := params.get("llm"): - return create_function(llm=llm, toolkit=toolkit) - - -def build_prompt_template(prompt, tools): - """Build PromptTemplate from ZeroShotPrompt""" - prefix = prompt["node"]["template"]["prefix"]["value"] - suffix = prompt["node"]["template"]["suffix"]["value"] - format_instructions = prompt["node"]["template"]["format_instructions"]["value"] - - tool_strings = "\n".join( - [f"{tool['data']['node']['name']}: {tool['data']['node']['description']}" for tool in tools] - ) - tool_names = ", ".join([tool["data"]["node"]["name"] for tool in tools]) - format_instructions = format_instructions.format(tool_names=tool_names) - value = "\n\n".join([prefix, tool_strings, format_instructions, suffix]) - - prompt["type"] = "PromptTemplate" - - prompt["node"] = { - "template": { - "_type": "prompt", - "input_variables": { - "type": "str", - "required": True, - "placeholder": "", - "list": True, - "show": False, - "multiline": False, - }, - "template": { - "type": "str", - "required": True, - "placeholder": "", - "list": False, - "show": True, - "multiline": True, - "value": value, - }, - "template_format": { - "type": "str", - "required": False, - "placeholder": "", - "list": False, - 
"show": False, - "multline": False, - "value": "f-string", - }, - "validate_template": { - "type": "bool", - "required": False, - "placeholder": "", - "list": False, - "show": False, - "multline": False, - "value": True, - }, - }, - "description": "Schema to represent a prompt for an LLM.", - "base_classes": ["BasePromptTemplate"], - } - - return prompt diff --git a/src/backend/base/langflow/interface/listing.py b/src/backend/base/langflow/interface/listing.py index a831f1098..a51e676db 100644 --- a/src/backend/base/langflow/interface/listing.py +++ b/src/backend/base/langflow/interface/listing.py @@ -21,7 +21,7 @@ class AllTypesDict(LazyLoadDictBase): from langflow.interface.types import get_all_types_dict settings_service = get_settings_service() - return get_all_types_dict(settings_service.settings.COMPONENTS_PATH) + return get_all_types_dict(settings_service.settings.components_path) lazy_load_dict = AllTypesDict() diff --git a/src/backend/base/langflow/interface/llms/__init__.py b/src/backend/base/langflow/interface/llms/__init__.py deleted file mode 100644 index c5d7186fb..000000000 --- a/src/backend/base/langflow/interface/llms/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from langflow.interface.llms.base import LLMCreator - -__all__ = ["LLMCreator"] diff --git a/src/backend/base/langflow/interface/llms/base.py b/src/backend/base/langflow/interface/llms/base.py deleted file mode 100644 index b7d91d674..000000000 --- a/src/backend/base/langflow/interface/llms/base.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Dict, List, Optional, Type - -from loguru import logger - -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.custom_lists import llm_type_to_cls_dict -from langflow.interface.utils import build_template_from_class -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.llms import LLMFrontendNode - - -class LLMCreator(LangChainTypeCreator): - type_name: str = "models" - - 
@property - def frontend_node_class(self) -> Type[LLMFrontendNode]: - return LLMFrontendNode - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - self.type_dict = llm_type_to_cls_dict - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - """Get the signature of an llm.""" - try: - return build_template_from_class(name, llm_type_to_cls_dict) - except ValueError as exc: - raise ValueError("LLM not found") from exc - - except AttributeError as exc: - logger.error(f"LLM {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - settings_service = get_settings_service() - return [ - llm.__name__ - for llm in self.type_to_loader_dict.values() - if llm.__name__ in settings_service.settings.LLMS or settings_service.settings.DEV - ] - - -llm_creator = LLMCreator() diff --git a/src/backend/base/langflow/interface/memories/__init__.py b/src/backend/base/langflow/interface/memories/__init__.py deleted file mode 100644 index 845eb29fe..000000000 --- a/src/backend/base/langflow/interface/memories/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from langflow.interface.memories.base import MemoryCreator - -__all__ = ["MemoryCreator"] diff --git a/src/backend/base/langflow/interface/memories/base.py b/src/backend/base/langflow/interface/memories/base.py deleted file mode 100644 index ea0eabbf1..000000000 --- a/src/backend/base/langflow/interface/memories/base.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import ClassVar, Dict, List, Optional, Type - -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.custom_lists import memory_type_to_cls_dict -from langflow.interface.utils import build_template_from_class -from langflow.legacy_custom.customs import get_custom_nodes -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.base import FrontendNode -from langflow.template.frontend_node.memories import MemoryFrontendNode -from 
langflow.utils.util import build_template_from_method -from loguru import logger - - -class MemoryCreator(LangChainTypeCreator): - type_name: str = "memories" - - from_method_nodes: ClassVar[Dict] = { - "ZepChatMessageHistory": "__init__", - "SQLiteEntityStore": "__init__", - } - - @property - def frontend_node_class(self) -> Type[FrontendNode]: - """The class type of the FrontendNode created in frontend_node.""" - return MemoryFrontendNode - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - self.type_dict = memory_type_to_cls_dict - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - """Get the signature of a memory.""" - try: - if name in get_custom_nodes(self.type_name).keys(): - return get_custom_nodes(self.type_name)[name] - elif name in self.from_method_nodes: - return build_template_from_method( - name, - type_to_cls_dict=memory_type_to_cls_dict, - method_name=self.from_method_nodes[name], - ) - return build_template_from_class(name, memory_type_to_cls_dict) - except ValueError as exc: - raise ValueError("Memory not found") from exc - except AttributeError as exc: - logger.error(f"Memory {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - settings_service = get_settings_service() - return [ - memory.__name__ - for memory in self.type_to_loader_dict.values() - if memory.__name__ in settings_service.settings.MEMORIES or settings_service.settings.DEV - ] - - -memory_creator = MemoryCreator() diff --git a/src/backend/base/langflow/interface/prompts/__init__.py b/src/backend/base/langflow/interface/prompts/__init__.py deleted file mode 100644 index 2a81e8bf0..000000000 --- a/src/backend/base/langflow/interface/prompts/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from langflow.interface.prompts.base import PromptCreator - -__all__ = ["PromptCreator"] diff --git a/src/backend/base/langflow/interface/prompts/base.py b/src/backend/base/langflow/interface/prompts/base.py deleted 
file mode 100644 index b9662e0cd..000000000 --- a/src/backend/base/langflow/interface/prompts/base.py +++ /dev/null @@ -1,64 +0,0 @@ -from typing import Dict, List, Optional, Type - -from langchain import prompts -from loguru import logger -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.importing.utils import import_class -from langflow.interface.utils import build_template_from_class -from langflow.legacy_custom.customs import get_custom_nodes -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.prompts import PromptFrontendNode - - -class PromptCreator(LangChainTypeCreator): - type_name: str = "prompts" - - @property - def frontend_node_class(self) -> Type[PromptFrontendNode]: - return PromptFrontendNode - - @property - def type_to_loader_dict(self) -> Dict: - settings_service = get_settings_service() - if self.type_dict is None: - self.type_dict = { - prompt_name: import_class(f"langchain.prompts.{prompt_name}") - # if prompt_name is not lower case it is a class - for prompt_name in prompts.__all__ - } - # Merge CUSTOM_PROMPTS into self.type_dict - from langflow.interface.prompts.custom import CUSTOM_PROMPTS - - self.type_dict.update(CUSTOM_PROMPTS) - # Now filter according to settings.prompts - self.type_dict = { - name: prompt - for name, prompt in self.type_dict.items() - if name in settings_service.settings.PROMPTS or settings_service.settings.DEV - } - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - try: - if name in get_custom_nodes(self.type_name).keys(): - return get_custom_nodes(self.type_name)[name] - return build_template_from_class(name, self.type_to_loader_dict) - except ValueError as exc: - # raise ValueError("Prompt not found") from exc - logger.error(f"Prompt {name} not found: {exc}") - except AttributeError as exc: - logger.error(f"Prompt {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - custom_prompts = 
get_custom_nodes("prompts") - # library_prompts = [ - # prompt.__annotations__["return"].__name__ - # for prompt in self.type_to_loader_dict.values() - # if prompt.__annotations__["return"].__name__ in settings.prompts - # or settings.dev - # ] - return list(self.type_to_loader_dict.keys()) + list(custom_prompts.keys()) - - -prompt_creator = PromptCreator() diff --git a/src/backend/base/langflow/interface/prompts/custom.py b/src/backend/base/langflow/interface/prompts/custom.py deleted file mode 100644 index e90ce8812..000000000 --- a/src/backend/base/langflow/interface/prompts/custom.py +++ /dev/null @@ -1,66 +0,0 @@ -from typing import Dict, List, Optional, Type -from pydantic.v1 import root_validator - -from langflow.interface.utils import extract_input_variables_from_prompt -from langchain_core.prompts import PromptTemplate - -# Steps to create a BaseCustomPrompt: -# 1. Create a prompt template that endes with: -# Current conversation: -# {history} -# Human: {input} -# {ai_prefix}: -# 2. Create a class that inherits from BaseCustomPrompt -# 3. Add the following class attributes: -# template: str = "" -# description: Optional[str] -# ai_prefix: Optional[str] = "{ai_prefix}" -# 3.1. 
The ai_prefix should be a value in input_variables -# SeriesCharacterPrompt is a working example -# If used in a LLMChain, with a Memory module, it will work as expected -# We should consider creating ConversationalChains that expose custom parameters -# That way it will be easier to create custom prompts - - -class BaseCustomPrompt(PromptTemplate): - template: str = "" - description: Optional[str] - ai_prefix: Optional[str] - - @root_validator(pre=False) - def build_template(cls, values): - format_dict = {} - ai_prefix_format_dict = {} - for key in values.get("input_variables", []): - new_value = values.get(key, f"{{{key}}}") - format_dict[key] = new_value - if key in values["ai_prefix"]: - ai_prefix_format_dict[key] = new_value - - values["ai_prefix"] = values["ai_prefix"].format(**ai_prefix_format_dict) - values["template"] = values["template"].format(**format_dict) - - values["template"] = values["template"] - values["input_variables"] = extract_input_variables_from_prompt(values["template"]) - return values - - -class SeriesCharacterPrompt(BaseCustomPrompt): - # Add a very descriptive description for the prompt generator - description: Optional[str] = "A prompt that asks the AI to act like a character from a series." - character: str - series: str - template: str = """I want you to act like {character} from {series}. -I want you to respond and answer like {character}. do not write any explanations. only answer like {character}. -You must know all of the knowledge of {character}. 
- -Current conversation: -{history} -Human: {input} -{character}:""" - - ai_prefix: str = "{character}" - input_variables: List[str] = ["character", "series"] - - -CUSTOM_PROMPTS: Dict[str, Type[BaseCustomPrompt]] = {"SeriesCharacterPrompt": SeriesCharacterPrompt} diff --git a/src/backend/base/langflow/interface/retrievers/__init__.py b/src/backend/base/langflow/interface/retrievers/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/base/langflow/interface/retrievers/base.py b/src/backend/base/langflow/interface/retrievers/base.py deleted file mode 100644 index 6eefe18db..000000000 --- a/src/backend/base/langflow/interface/retrievers/base.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Any, ClassVar, Dict, List, Optional, Type - -from langchain_community import retrievers -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.importing.utils import import_class -from langflow.interface.utils import build_template_from_class -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.retrievers import RetrieverFrontendNode -from langflow.utils.util import build_template_from_method -from loguru import logger - - -class RetrieverCreator(LangChainTypeCreator): - type_name: str = "retrievers" - - from_method_nodes: ClassVar[Dict] = { - "MultiQueryRetriever": "from_llm", - "ZepRetriever": "__init__", - } - - @property - def frontend_node_class(self) -> Type[RetrieverFrontendNode]: - return RetrieverFrontendNode - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - self.type_dict: dict[str, Any] = { - retriever_name: import_class(f"langchain_community.retrievers.{retriever_name}") - for retriever_name in retrievers.__all__ - } - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - """Get the signature of an embedding.""" - try: - if name in self.from_method_nodes: - return build_template_from_method( - 
name, - type_to_cls_dict=self.type_to_loader_dict, - method_name=self.from_method_nodes[name], - ) - else: - return build_template_from_class(name, type_to_cls_dict=self.type_to_loader_dict) - except ValueError as exc: - raise ValueError(f"Retriever {name} not found") from exc - except AttributeError as exc: - logger.error(f"Retriever {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - settings_service = get_settings_service() - return [ - retriever - for retriever in self.type_to_loader_dict.keys() - if retriever in settings_service.settings.RETRIEVERS or settings_service.settings.DEV - ] - - -retriever_creator = RetrieverCreator() diff --git a/src/backend/base/langflow/interface/text_splitters/__init__.py b/src/backend/base/langflow/interface/text_splitters/__init__.py deleted file mode 100644 index 4bb9dd1b0..000000000 --- a/src/backend/base/langflow/interface/text_splitters/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from langflow.interface.text_splitters.base import TextSplitterCreator - -__all__ = ["TextSplitterCreator"] diff --git a/src/backend/base/langflow/interface/text_splitters/base.py b/src/backend/base/langflow/interface/text_splitters/base.py deleted file mode 100644 index 69d9799b3..000000000 --- a/src/backend/base/langflow/interface/text_splitters/base.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import Dict, List, Optional, Type - -from loguru import logger - -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.custom_lists import textsplitter_type_to_cls_dict -from langflow.interface.utils import build_template_from_class -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode - - -class TextSplitterCreator(LangChainTypeCreator): - type_name: str = "textsplitters" - - @property - def frontend_node_class(self) -> Type[TextSplittersFrontendNode]: - return TextSplittersFrontendNode - - @property - def 
type_to_loader_dict(self) -> Dict: - return textsplitter_type_to_cls_dict - - def get_signature(self, name: str) -> Optional[Dict]: - """Get the signature of a text splitter.""" - try: - return build_template_from_class(name, textsplitter_type_to_cls_dict) - except ValueError as exc: - raise ValueError(f"Text Splitter {name} not found") from exc - except AttributeError as exc: - logger.error(f"Text Splitter {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - settings_service = get_settings_service() - return [ - textsplitter.__name__ - for textsplitter in self.type_to_loader_dict.values() - if textsplitter.__name__ in settings_service.settings.TEXTSPLITTERS or settings_service.settings.DEV - ] - - -textsplitter_creator = TextSplitterCreator() diff --git a/src/backend/base/langflow/interface/toolkits/__init__.py b/src/backend/base/langflow/interface/toolkits/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/base/langflow/interface/toolkits/base.py b/src/backend/base/langflow/interface/toolkits/base.py deleted file mode 100644 index eca7ae3b7..000000000 --- a/src/backend/base/langflow/interface/toolkits/base.py +++ /dev/null @@ -1,71 +0,0 @@ -import warnings -from typing import Callable, Dict, List, Optional - -from langchain.agents import agent_toolkits -from loguru import logger -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.importing.utils import import_class, import_module -from langflow.interface.utils import build_template_from_class -from langflow.services.deps import get_settings_service - - -class ToolkitCreator(LangChainTypeCreator): - type_name: str = "toolkits" - all_types: List[str] = agent_toolkits.__all__ - create_functions: Dict = { - "JsonToolkit": [], - "SQLDatabaseToolkit": [], - "OpenAPIToolkit": ["create_openapi_agent"], - "VectorStoreToolkit": [ - "create_vectorstore_agent", - "create_vectorstore_router_agent", - "VectorStoreInfo", - ], - 
"ZapierToolkit": [], - "PandasToolkit": ["create_pandas_dataframe_agent"], - "CSVToolkit": ["create_csv_agent"], - } - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - settings_service = get_settings_service() - self.type_dict = { - toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}") - # if toolkit_name is not lower case it is a class - for toolkit_name in agent_toolkits.__all__ - if not toolkit_name.islower() and toolkit_name in settings_service.settings.TOOLKITS - } - - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - try: - template = build_template_from_class(name, self.type_to_loader_dict) - # add Tool to base_classes - if "toolkit" in name.lower() and template: - template["base_classes"].append("Tool") - return template - except ValueError as exc: - raise ValueError("Toolkit not found") from exc - except AttributeError as exc: - logger.error(f"Toolkit {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - return list(self.type_to_loader_dict.keys()) - - def get_create_function(self, name: str) -> Callable: - if loader_name := self.create_functions.get(name): - return import_module(f"from langchain.agents.agent_toolkits import {loader_name[0]}") - else: - raise ValueError("Toolkit not found") - - def has_create_function(self, name: str) -> bool: - # check if the function list is not empty - return bool(self.create_functions.get(name, None)) - - -toolkits_creator = ToolkitCreator() diff --git a/src/backend/base/langflow/interface/toolkits/custom.py b/src/backend/base/langflow/interface/toolkits/custom.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/base/langflow/interface/tools/__init__.py b/src/backend/base/langflow/interface/tools/__init__.py deleted file mode 100644 index 148892e90..000000000 --- 
a/src/backend/base/langflow/interface/tools/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from langflow.interface.tools.base import ToolCreator - -__all__ = ["ToolCreator"] diff --git a/src/backend/base/langflow/interface/tools/base.py b/src/backend/base/langflow/interface/tools/base.py deleted file mode 100644 index f64192a3b..000000000 --- a/src/backend/base/langflow/interface/tools/base.py +++ /dev/null @@ -1,170 +0,0 @@ -from typing import Dict, List, Optional - -from langchain.agents.load_tools import _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS - -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.tools.constants import ALL_TOOLS_NAMES, CUSTOM_TOOLS, FILE_TOOLS, OTHER_TOOLS -from langflow.interface.tools.util import get_tool_params -from langflow.legacy_custom import customs -from langflow.services.deps import get_settings_service -from langflow.template.field.base import TemplateField -from langflow.template.template.base import Template -from langflow.utils import util -from langflow.utils.logger import logger -from langflow.interface.utils import build_template_from_class - -TOOL_INPUTS = { - "str": TemplateField( - field_type="str", - required=True, - is_list=False, - show=True, - placeholder="", - value="", - ), - "llm": TemplateField(field_type="BaseLanguageModel", required=True, is_list=False, show=True), - "func": TemplateField( - field_type="Callable", - required=True, - is_list=False, - show=True, - multiline=True, - ), - "code": TemplateField( - field_type="str", - required=True, - is_list=False, - show=True, - value="", - multiline=True, - ), - "path": TemplateField( - field_type="file", - required=True, - is_list=False, - show=True, - value="", - file_types=[".json", ".yaml", ".yml"], - ), -} - - -class ToolCreator(LangChainTypeCreator): - type_name: str = "tools" - tools_dict: Optional[Dict] = None - - @property - def type_to_loader_dict(self) -> Dict: - settings_service = get_settings_service() - if 
self.tools_dict is None: - all_tools = {} - - for tool, tool_fcn in ALL_TOOLS_NAMES.items(): - try: - tool_params = get_tool_params(tool_fcn) - except Exception: - logger.error(f"Error getting params for tool {tool}") - continue - - tool_name = tool_params.get("name") or tool - - if tool_name in settings_service.settings.TOOLS or settings_service.settings.DEV: - if tool_name == "JsonSpec": - tool_params["path"] = tool_params.pop("dict_") # type: ignore - all_tools[tool_name] = { - "type": tool, - "params": tool_params, - "fcn": tool_fcn, - } - - self.tools_dict = all_tools - - return self.tools_dict - - def get_signature(self, name: str) -> Optional[Dict]: - """Get the signature of a tool.""" - - base_classes = ["Tool", "BaseTool"] - fields = [] - params = [] - tool_params = {} - - # Raise error if name is not in tools - if name not in self.type_to_loader_dict.keys(): - raise ValueError("Tool not found") - - tool_type: str = self.type_to_loader_dict[name]["type"] # type: ignore - - # if tool_type in _BASE_TOOLS.keys(): - # params = [] - if tool_type in _LLM_TOOLS.keys(): - params = ["llm"] - elif tool_type in _EXTRA_LLM_TOOLS.keys(): - extra_keys = _EXTRA_LLM_TOOLS[tool_type][1] - params = ["llm"] + extra_keys - elif tool_type in _EXTRA_OPTIONAL_TOOLS.keys(): - extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type][1] - params = extra_keys - # elif tool_type == "Tool": - # params = ["name", "description", "func"] - elif tool_type in CUSTOM_TOOLS: - # Get custom tool params - params = self.type_to_loader_dict[name]["params"] # type: ignore - base_classes = ["Callable"] - if node := customs.get_custom_nodes("tools").get(tool_type): - return node - elif tool_type in FILE_TOOLS: - params = self.type_to_loader_dict[name]["params"] # type: ignore - base_classes += [name] - elif tool_type in OTHER_TOOLS: - tool_dict = build_template_from_class(tool_type, OTHER_TOOLS) - fields = tool_dict["template"] - - # _type is the only key in fields - # return None - if len(fields) == 1 and 
"_type" in fields: - return None - - # Pop unnecessary fields and add name - fields.pop("_type") # type: ignore - fields.pop("return_direct", None) # type: ignore - fields.pop("verbose", None) # type: ignore - - tool_params = { - "name": fields.pop("name")["value"], # type: ignore - "description": fields.pop("description")["value"], # type: ignore - } - - fields = [ - TemplateField(name=name, field_type=field["type"], **field) - for name, field in fields.items() # type: ignore - ] - base_classes += tool_dict["base_classes"] - - # Copy the field and add the name - for param in params: - field = TOOL_INPUTS.get(param, TOOL_INPUTS["str"]).copy() - field.name = param - field.advanced = False - if param == "aiosession": - field.show = False - field.required = False - - fields.append(field) - - template = Template(fields=fields, type_name=tool_type) - - tool_params = {**tool_params, **self.type_to_loader_dict[name]["params"]} - return { - "template": util.format_dict(template.to_dict()), - **tool_params, - "base_classes": base_classes, - } - - def to_list(self) -> List[str]: - """List all load tools""" - - return list(self.type_to_loader_dict.keys()) - - -tool_creator = ToolCreator() diff --git a/src/backend/base/langflow/interface/tools/constants.py b/src/backend/base/langflow/interface/tools/constants.py deleted file mode 100644 index 39e3b7465..000000000 --- a/src/backend/base/langflow/interface/tools/constants.py +++ /dev/null @@ -1,25 +0,0 @@ -from langchain import tools -from langchain.agents.load_tools import _BASE_TOOLS, _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS -from langchain_community.tools.json.tool import JsonSpec - -from langflow.interface.importing.utils import import_class -from langflow.interface.tools.custom import PythonFunctionTool -from langchain_core.tools import Tool - -FILE_TOOLS = {"JsonSpec": JsonSpec} -CUSTOM_TOOLS = { - "Tool": Tool, - "PythonFunctionTool": PythonFunctionTool, -} - -OTHER_TOOLS = {tool: 
import_class(f"langchain_community.tools.{tool}") for tool in tools.__all__} - -ALL_TOOLS_NAMES = { - **_BASE_TOOLS, - **_LLM_TOOLS, # type: ignore - **{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()}, # type: ignore - **{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()}, - **CUSTOM_TOOLS, - **FILE_TOOLS, # type: ignore - **OTHER_TOOLS, -} diff --git a/src/backend/base/langflow/interface/tools/custom.py b/src/backend/base/langflow/interface/tools/custom.py deleted file mode 100644 index 8afaa10da..000000000 --- a/src/backend/base/langflow/interface/tools/custom.py +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Callable, Optional -from pydantic.v1 import BaseModel, validator - -from langflow.interface.custom.utils import get_function -from langflow.utils import validate -from langchain_core.tools import Tool - - -class Function(BaseModel): - code: str - function: Optional[Callable] = None - imports: Optional[str] = None - - # Eval code and store the function - def __init__(self, **data): - super().__init__(**data) - - # Validate the function - @validator("code") - def validate_func(cls, v): - try: - validate.eval_function(v) - except Exception as e: - raise e - - return v - - def get_function(self): - """Get the function""" - function_name = validate.extract_function_name(self.code) - - return validate.create_function(self.code, function_name) - - -class PythonFunctionTool(Function, Tool): - name: str = "Custom Tool" - description: str - code: str - - def ___init__(self, name: str, description: str, code: str): - self.name = name - self.description = description - self.code = code - self.func = get_function(self.code) - super().__init__(name=name, description=description, func=self.func) - - -class PythonFunction(Function): - code: str diff --git a/src/backend/base/langflow/interface/tools/util.py b/src/backend/base/langflow/interface/tools/util.py deleted file mode 100644 index f572efe5e..000000000 --- a/src/backend/base/langflow/interface/tools/util.py +++ 
/dev/null @@ -1,99 +0,0 @@ -import ast -import inspect -import textwrap -from typing import Dict, Union -from langchain_core.tools import Tool - - -def get_func_tool_params(func, **kwargs) -> Union[Dict, None]: - tree = ast.parse(textwrap.dedent(inspect.getsource(func))) - - # Iterate over the statements in the abstract syntax tree - for node in ast.walk(tree): - # Find the first return statement - if isinstance(node, ast.Return): - tool = node.value - if isinstance(tool, ast.Call): - if isinstance(tool.func, ast.Name) and tool.func.id == "Tool": - if tool.keywords: - tool_params = {} - for keyword in tool.keywords: - if keyword.arg == "name": - try: - tool_params["name"] = ast.literal_eval(keyword.value) - except ValueError: - break - elif keyword.arg == "description": - try: - tool_params["description"] = ast.literal_eval(keyword.value) - except ValueError: - continue - - return tool_params - return { - "name": ast.literal_eval(tool.args[0]), - "description": ast.literal_eval(tool.args[2]), - } - # - else: - # get the class object from the return statement - try: - class_obj = eval(compile(ast.Expression(tool), "", "eval")) - except Exception: - return None - - return { - "name": getattr(class_obj, "name"), - "description": getattr(class_obj, "description"), - } - # Return None if no return statement was found - return None - - -def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]: - tree = ast.parse(textwrap.dedent(inspect.getsource(cls))) - - tool_params = {} - - # Iterate over the statements in the abstract syntax tree - for node in ast.walk(tree): - if isinstance(node, ast.ClassDef): - # Find the class definition and look for methods - for stmt in node.body: - if isinstance(stmt, ast.FunctionDef) and stmt.name == "__init__": - # There is no assignment statements in the __init__ method - # So we need to get the params from the function definition - for arg in stmt.args.args: - if arg.arg == "name": - # It should be the name of the class - 
tool_params[arg.arg] = cls.__name__ - elif arg.arg == "self": - continue - # If there is not default value, set it to an empty string - else: - try: - annotation = ast.literal_eval(arg.annotation) # type: ignore - tool_params[arg.arg] = annotation - except ValueError: - tool_params[arg.arg] = "" - # Get the attribute name and the annotation - elif cls != Tool and isinstance(stmt, ast.AnnAssign): - # Get the attribute name and the annotation - tool_params[stmt.target.id] = "" # type: ignore - - return tool_params - - -def get_tool_params(tool, **kwargs) -> Dict: - # Parse the function code into an abstract syntax tree - # Define if it is a function or a class - if inspect.isfunction(tool): - return get_func_tool_params(tool, **kwargs) or {} - elif inspect.isclass(tool): - # Get the parameters necessary to - # instantiate the class - - return get_class_tool_params(tool, **kwargs) or {} - - else: - raise ValueError("Tool must be a function or class.") diff --git a/src/backend/base/langflow/interface/types.py b/src/backend/base/langflow/interface/types.py index 46fa44a37..a092a7d19 100644 --- a/src/backend/base/langflow/interface/types.py +++ b/src/backend/base/langflow/interface/types.py @@ -1,69 +1,10 @@ -from cachetools import LRUCache, cached - -from langflow.interface.agents.base import agent_creator -from langflow.interface.chains.base import chain_creator -from langflow.interface.custom.directory_reader.utils import merge_nested_dicts_with_renaming -from langflow.interface.custom.utils import build_custom_components -from langflow.interface.document_loaders.base import documentloader_creator -from langflow.interface.embeddings.base import embedding_creator -from langflow.interface.llms.base import llm_creator -from langflow.interface.memories.base import memory_creator -from langflow.interface.retrievers.base import retriever_creator -from langflow.interface.text_splitters.base import textsplitter_creator -from langflow.interface.toolkits.base import 
toolkits_creator -from langflow.interface.tools.base import tool_creator -from langflow.interface.wrappers.base import wrapper_creator - - -# Used to get the base_classes list -def get_type_list(): - """Get a list of all langchain types""" - all_types = build_langchain_types_dict() - - # all_types.pop("tools") - - for key, value in all_types.items(): - all_types[key] = [item["template"]["_type"] for item in value.values()] - - return all_types - - -@cached(LRUCache(maxsize=1)) -def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union - """Build a dictionary of all langchain types""" - all_types = {} - - creators = [ - chain_creator, - agent_creator, - # prompt_creator, - llm_creator, - memory_creator, - tool_creator, - toolkits_creator, - wrapper_creator, - embedding_creator, - # vectorstore_creator, - documentloader_creator, - textsplitter_creator, - # utility_creator, - retriever_creator, - ] - - all_types = {} - for creator in creators: - created_types = creator.to_dict() - if created_types[creator.type_name].values(): - all_types.update(created_types) - - return all_types +from langflow.custom.utils import build_custom_components def get_all_types_dict(components_paths): """Get all types dictionary combining native and custom components.""" - native_components = build_langchain_types_dict() custom_components_from_file = build_custom_components(components_paths=components_paths) - return merge_nested_dicts_with_renaming(native_components, custom_components_from_file) + return custom_components_from_file def get_all_components(components_paths, as_dict=False): diff --git a/src/backend/base/langflow/interface/utilities/__init__.py b/src/backend/base/langflow/interface/utilities/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/base/langflow/interface/utilities/base.py b/src/backend/base/langflow/interface/utilities/base.py deleted file mode 100644 index 474bf8ca2..000000000 --- 
a/src/backend/base/langflow/interface/utilities/base.py +++ /dev/null @@ -1,65 +0,0 @@ -from typing import Dict, List, Optional, Type - -from langchain_community import utilities -from loguru import logger -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.importing.utils import import_class -from langflow.interface.utils import build_template_from_class -from langflow.legacy_custom.customs import get_custom_nodes -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.utilities import UtilitiesFrontendNode - - -class UtilityCreator(LangChainTypeCreator): - type_name: str = "utilities" - - @property - def frontend_node_class(self) -> Type[UtilitiesFrontendNode]: - return UtilitiesFrontendNode - - @property - def type_to_loader_dict(self) -> Dict: - """ - Returns a dictionary mapping utility names to their corresponding loader classes. - If the dictionary has not been created yet, it is created by importing all utility classes - from the langchain.chains module and filtering them according to the settings.utilities list. 
- """ - if self.type_dict is None: - settings_service = get_settings_service() - self.type_dict = {} - for utility_name in utilities.__all__: - try: - imported = import_class(f"langchain_community.utilities.{utility_name}") - self.type_dict[utility_name] = imported - except Exception: - pass - - self.type_dict["SQLDatabase"] = utilities.SQLDatabase - # Filter according to settings.utilities - self.type_dict = { - name: utility - for name, utility in self.type_dict.items() - if name in settings_service.settings.UTILITIES or settings_service.settings.DEV - } - - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - """Get the signature of a utility.""" - try: - custom_nodes = get_custom_nodes(self.type_name) - if name in custom_nodes.keys(): - return custom_nodes[name] - return build_template_from_class(name, self.type_to_loader_dict) - except ValueError as exc: - raise ValueError(f"Utility {name} not found") from exc - - except AttributeError as exc: - logger.error(f"Utility {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - return list(self.type_to_loader_dict.keys()) - - -utility_creator = UtilityCreator() diff --git a/src/backend/base/langflow/interface/utils.py b/src/backend/base/langflow/interface/utils.py index 252d5f411..986352f15 100644 --- a/src/backend/base/langflow/interface/utils.py +++ b/src/backend/base/langflow/interface/utils.py @@ -7,12 +7,13 @@ from typing import Dict import yaml from docstring_parser import parse +from langchain_core.language_models import BaseLanguageModel +from loguru import logger +from PIL.Image import Image + from langflow.services.chat.config import ChatConfig from langflow.services.deps import get_settings_service from langflow.utils.util import format_dict, get_base_classes, get_default_factory -from loguru import logger -from PIL.Image import Image -from langchain_core.language_models import BaseLanguageModel def load_file_into_dict(file_path: str) -> dict: @@ -95,13 
+96,14 @@ def setup_llm_caching(): try: set_langchain_cache(settings_service.settings) except ImportError: - logger.warning(f"Could not import {settings_service.settings.CACHE_TYPE}. ") + logger.warning(f"Could not import {settings_service.settings.cache_type}. ") except Exception as exc: logger.warning(f"Could not setup LLM caching. Error: {exc}") def set_langchain_cache(settings): from langchain.globals import set_llm_cache + from langflow.interface.importing.utils import import_class if cache_type := os.getenv("LANGFLOW_LANGCHAIN_CACHE"): diff --git a/src/backend/base/langflow/interface/vector_store/__init__.py b/src/backend/base/langflow/interface/vector_store/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/base/langflow/interface/vector_store/base.py b/src/backend/base/langflow/interface/vector_store/base.py deleted file mode 100644 index 893c78fca..000000000 --- a/src/backend/base/langflow/interface/vector_store/base.py +++ /dev/null @@ -1,52 +0,0 @@ -from typing import Any, Dict, List, Optional, Type - -from langchain import vectorstores -from loguru import logger - -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.importing.utils import import_class -from langflow.services.deps import get_settings_service -from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode -from langflow.utils.util import build_template_from_method - - -class VectorstoreCreator(LangChainTypeCreator): - type_name: str = "vectorstores" - - @property - def frontend_node_class(self) -> Type[VectorStoreFrontendNode]: - return VectorStoreFrontendNode - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - self.type_dict: dict[str, Any] = { - vectorstore_name: import_class(f"langchain_community.vectorstores.{vectorstore_name}") - for vectorstore_name in vectorstores.__all__ - } - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - 
"""Get the signature of an embedding.""" - try: - return build_template_from_method( - name, - type_to_cls_dict=self.type_to_loader_dict, - method_name="from_texts", - ) - except ValueError as exc: - raise ValueError(f"Vector Store {name} not found") from exc - except AttributeError as exc: - logger.error(f"Vector Store {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - settings_service = get_settings_service() - return [ - vectorstore - for vectorstore in self.type_to_loader_dict.keys() - if vectorstore in settings_service.settings.VECTORSTORES or settings_service.settings.DEV - ] - - -vectorstore_creator = VectorstoreCreator() diff --git a/src/backend/base/langflow/interface/wrappers/__init__.py b/src/backend/base/langflow/interface/wrappers/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/backend/base/langflow/interface/wrappers/base.py b/src/backend/base/langflow/interface/wrappers/base.py deleted file mode 100644 index b850d345f..000000000 --- a/src/backend/base/langflow/interface/wrappers/base.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Dict, List, Optional - -from langchain_community.utilities import requests -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.utils import build_template_from_class -from loguru import logger - - -class WrapperCreator(LangChainTypeCreator): - type_name: str = "wrappers" - - @property - def type_to_loader_dict(self) -> Dict: - if self.type_dict is None: - self.type_dict = {wrapper.__name__: wrapper for wrapper in [requests.TextRequestsWrapper]} - return self.type_dict - - def get_signature(self, name: str) -> Optional[Dict]: - try: - return build_template_from_class(name, self.type_to_loader_dict) - except ValueError as exc: - raise ValueError("Wrapper not found") from exc - except AttributeError as exc: - logger.error(f"Wrapper {name} not loaded: {exc}") - return None - - def to_list(self) -> List[str]: - return 
list(self.type_to_loader_dict.keys()) - - -wrapper_creator = WrapperCreator() diff --git a/src/backend/base/langflow/legacy_custom/customs.py b/src/backend/base/langflow/legacy_custom/customs.py index ff69064ff..26e5e33fa 100644 --- a/src/backend/base/langflow/legacy_custom/customs.py +++ b/src/backend/base/langflow/legacy_custom/customs.py @@ -2,33 +2,6 @@ from langflow.template import frontend_node # These should always be instantiated CUSTOM_NODES: dict[str, dict[str, frontend_node.base.FrontendNode]] = { - # "prompts": { - # "ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode(), - # }, - "tools": { - "PythonFunctionTool": frontend_node.tools.PythonFunctionToolNode(), - "Tool": frontend_node.tools.ToolNode(), - }, - "agents": { - "JsonAgent": frontend_node.agents.JsonAgentNode(), - "CSVAgent": frontend_node.agents.CSVAgentNode(), - "VectorStoreAgent": frontend_node.agents.VectorStoreAgentNode(), - "VectorStoreRouterAgent": frontend_node.agents.VectorStoreRouterAgentNode(), - "SQLAgent": frontend_node.agents.SQLAgentNode(), - }, - "utilities": { - "SQLDatabase": frontend_node.agents.SQLDatabaseNode(), - }, - "memories": { - "PostgresChatMessageHistory": frontend_node.memories.PostgresChatMessageHistoryFrontendNode(), - "MongoDBChatMessageHistory": frontend_node.memories.MongoDBChatMessageHistoryFrontendNode(), - }, - "chains": { - "SeriesCharacterChain": frontend_node.chains.SeriesCharacterChainNode(), - "TimeTravelGuideChain": frontend_node.chains.TimeTravelGuideChainNode(), - "MidJourneyPromptChain": frontend_node.chains.MidJourneyPromptChainNode(), - "load_qa_chain": frontend_node.chains.CombineDocsChainNode(), - }, "custom_components": { "CustomComponent": frontend_node.custom_components.CustomComponentFrontendNode(), }, diff --git a/src/backend/base/langflow/load.py b/src/backend/base/langflow/load.py deleted file mode 100644 index 1262ac4b9..000000000 --- a/src/backend/base/langflow/load.py +++ /dev/null @@ -1 +0,0 @@ -from langflow.processing.load 
import load_flow_from_json, run_flow_from_json # noqa: F401 diff --git a/src/backend/base/langflow/load/__init__.py b/src/backend/base/langflow/load/__init__.py new file mode 100644 index 000000000..2002e8bb1 --- /dev/null +++ b/src/backend/base/langflow/load/__init__.py @@ -0,0 +1,3 @@ +from .load import load_flow_from_json, run_flow_from_json # noqa: F401 + +__all__ = ["load_flow_from_json", "run_flow_from_json"] diff --git a/src/backend/base/langflow/processing/load.py b/src/backend/base/langflow/load/load.py similarity index 100% rename from src/backend/base/langflow/processing/load.py rename to src/backend/base/langflow/load/load.py diff --git a/src/backend/base/langflow/main.py b/src/backend/base/langflow/main.py index 697cfa226..c81c014e2 100644 --- a/src/backend/base/langflow/main.py +++ b/src/backend/base/langflow/main.py @@ -14,7 +14,11 @@ from rich import print as rprint from starlette.middleware.base import BaseHTTPMiddleware from langflow.api import router -from langflow.initial_setup.setup import create_or_update_starter_projects +from langflow.initial_setup.setup import ( + create_or_update_starter_projects, + initialize_super_user_if_needed, + load_flows_from_directory, +) from langflow.interface.utils import setup_llm_caching from langflow.services.plugins.langfuse_plugin import LangfuseInstance from langflow.services.utils import initialize_services, teardown_services @@ -33,22 +37,22 @@ class JavaScriptMIMETypeMiddleware(BaseHTTPMiddleware): return response -def get_lifespan(fix_migration=False, socketio_server=None): - from langflow.version import __version__ # type: ignore - +def get_lifespan(fix_migration=False, socketio_server=None, version=None): @asynccontextmanager async def lifespan(app: FastAPI): nest_asyncio.apply() # Startup message - if __version__: - rprint(f"[bold green]Starting Langflow v{__version__}...[/bold green]") + if version: + rprint(f"[bold green]Starting Langflow v{version}...[/bold green]") else: rprint("[bold 
green]Starting Langflow...[/bold green]") try: initialize_services(fix_migration=fix_migration, socketio_server=socketio_server) setup_llm_caching() LangfuseInstance.update() + initialize_super_user_if_needed() create_or_update_starter_projects() + load_flows_from_directory() yield except Exception as exc: if "langflow migration --fix" not in str(exc): @@ -63,11 +67,17 @@ def get_lifespan(fix_migration=False, socketio_server=None): def create_app(): """Create the FastAPI app and include the router.""" + try: + from langflow.version import __version__ # type: ignore + except ImportError: + from importlib.metadata import version + + __version__ = version("langflow-base") configure() socketio_server = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True) - lifespan = get_lifespan(socketio_server=socketio_server) - app = FastAPI(lifespan=lifespan) + lifespan = get_lifespan(socketio_server=socketio_server, version=__version__) + app = FastAPI(lifespan=lifespan, title="Langflow", version=__version__) origins = ["*"] app.add_middleware( diff --git a/src/backend/base/langflow/processing/base.py b/src/backend/base/langflow/processing/base.py index 35e46a3b2..26da99842 100644 --- a/src/backend/base/langflow/processing/base.py +++ b/src/backend/base/langflow/processing/base.py @@ -1,11 +1,9 @@ from typing import TYPE_CHECKING, List, Union -from langchain.agents.agent import AgentExecutor +from langchain_core.callbacks import BaseCallbackHandler from loguru import logger -from langflow.processing.process import fix_memory_inputs, format_actions from langflow.services.deps import get_plugins_service -from langchain_core.callbacks import BaseCallbackHandler if TYPE_CHECKING: from langfuse.callback import CallbackHandler # type: ignore @@ -44,48 +42,3 @@ def flush_langfuse_callback_if_present(callbacks: List[Union[BaseCallbackHandler if hasattr(callback, "langfuse") and hasattr(callback.langfuse, "flush"): callback.langfuse.flush() break - - -async def 
get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwargs): - """Get result and thought from extracted json""" - - try: - if hasattr(langchain_object, "verbose"): - langchain_object.verbose = True - - if hasattr(langchain_object, "return_intermediate_steps"): - # https://github.com/hwchase17/langchain/issues/2068 - # Deactivating until we have a frontend solution - # to display intermediate steps - langchain_object.return_intermediate_steps = True - try: - if not isinstance(langchain_object, AgentExecutor): - fix_memory_inputs(langchain_object) - except Exception as exc: - logger.error(f"Error fixing memory inputs: {exc}") - - trace_id = kwargs.pop("session_id", None) - try: - callbacks = setup_callbacks(sync=False, trace_id=trace_id, **kwargs) - output = await langchain_object.acall(inputs, callbacks=callbacks) - except Exception as exc: - # make the error message more informative - logger.debug(f"Error: {str(exc)}") - callbacks = setup_callbacks(sync=True, trace_id=trace_id, **kwargs) - output = langchain_object(inputs, callbacks=callbacks) - - # if langfuse callback is present, run callback.langfuse.flush() - flush_langfuse_callback_if_present(callbacks) - - intermediate_steps = output.get("intermediate_steps", []) if isinstance(output, dict) else [] - - result = output.get(langchain_object.output_keys[0]) if isinstance(output, dict) else output - try: - thought = format_actions(intermediate_steps) if intermediate_steps else "" - except Exception as exc: - logger.exception(exc) - thought = "" - except Exception as exc: - logger.exception(exc) - raise ValueError(f"Error: {str(exc)}") from exc - return result, thought, output diff --git a/src/backend/base/langflow/processing/process.py b/src/backend/base/langflow/processing/process.py index 326e8ca3d..d53b5e25f 100644 --- a/src/backend/base/langflow/processing/process.py +++ b/src/backend/base/langflow/processing/process.py @@ -1,127 +1,19 @@ -from typing import TYPE_CHECKING, Any, Dict, List, 
Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union - -from langchain.agents import AgentExecutor from loguru import logger from pydantic import BaseModel from langflow.graph.graph.base import Graph from langflow.graph.schema import RunOutputs from langflow.graph.vertex.base import Vertex -from langflow.interface.run import get_memory_key, update_memory_keys from langflow.schema.graph import InputValue, Tweaks from langflow.schema.schema import INPUT_FIELD_NAME from langflow.services.session.service import SessionService -from langchain_core.agents import AgentAction - if TYPE_CHECKING: from langflow.api.v1.schemas import InputValueRequest -def fix_memory_inputs(langchain_object): - """ - Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the - object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the - get_memory_key function and updates the memory keys using the update_memory_keys function. 
- """ - if not hasattr(langchain_object, "memory") or langchain_object.memory is None: - return - try: - if ( - hasattr(langchain_object.memory, "memory_key") - and langchain_object.memory.memory_key in langchain_object.input_variables - ): - return - except AttributeError: - input_variables = ( - langchain_object.prompt.input_variables - if hasattr(langchain_object, "prompt") - else langchain_object.input_keys - ) - if langchain_object.memory.memory_key in input_variables: - return - - possible_new_mem_key = get_memory_key(langchain_object) - if possible_new_mem_key is not None: - update_memory_keys(langchain_object, possible_new_mem_key) - - -def format_actions(actions: List[Tuple[AgentAction, str]]) -> str: - """Format a list of (AgentAction, answer) tuples into a string.""" - output = [] - for action, answer in actions: - log = action.log - tool = action.tool - tool_input = action.tool_input - output.append(f"Log: {log}") - if "Action" not in log and "Action Input" not in log: - output.append(f"Tool: {tool}") - output.append(f"Tool Input: {tool_input}") - output.append(f"Answer: {answer}") - output.append("") # Add a blank line - return "\n".join(output) - - -def get_result_and_thought(langchain_object: Any, inputs: dict): - """Get result and thought from extracted json""" - try: - if hasattr(langchain_object, "verbose"): - langchain_object.verbose = True - - if hasattr(langchain_object, "return_intermediate_steps"): - langchain_object.return_intermediate_steps = False - - try: - if not isinstance(langchain_object, AgentExecutor): - fix_memory_inputs(langchain_object) - except Exception as exc: - logger.error(f"Error fixing memory inputs: {exc}") - - try: - output = langchain_object(inputs, return_only_outputs=True) - except ValueError as exc: - # make the error message more informative - logger.debug(f"Error: {str(exc)}") - output = langchain_object.run(inputs) - - except Exception as exc: - raise ValueError(f"Error: {str(exc)}") from exc - return output - - 
-def get_input_str_if_only_one_input(inputs: dict) -> Optional[str]: - """Get input string if only one input is provided""" - return list(inputs.values())[0] if len(inputs) == 1 else None - - -def process_inputs( - inputs: Optional[Union[dict, List[dict]]] = None, - artifacts: Optional[Dict[str, Any]] = None, -) -> Union[dict, List[dict]]: - if inputs is None: - inputs = {} - if artifacts is None: - artifacts = {} - - if isinstance(inputs, dict): - inputs = update_inputs_dict(inputs, artifacts) - elif isinstance(inputs, List): - inputs = [update_inputs_dict(inp, artifacts) for inp in inputs] - - return inputs - - -def update_inputs_dict(inputs: dict, artifacts: Dict[str, Any]) -> dict: - for key, value in artifacts.items(): - if key == "repr": - continue - elif key not in inputs or not inputs[key]: - inputs[key] = value - - return inputs - - class Result(BaseModel): result: Any session_id: str diff --git a/src/backend/base/langflow/services/auth/utils.py b/src/backend/base/langflow/services/auth/utils.py index f8396077c..0e0aead88 100644 --- a/src/backend/base/langflow/services/auth/utils.py +++ b/src/backend/base/langflow/services/auth/utils.py @@ -76,11 +76,6 @@ async def get_current_user( if token: return await get_current_user_by_jwt(token, db) else: - if not query_param and not header_param: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="An API key as query or header, or a JWT token must be passed", - ) user = await api_key_security(query_param, header_param, db) if user: return user @@ -216,15 +211,14 @@ def create_super_user( def create_user_longterm_token(db: Session = Depends(get_session)) -> tuple[UUID, dict]: settings_service = get_settings_service() + username = settings_service.auth_settings.SUPERUSER - password = settings_service.auth_settings.SUPERUSER_PASSWORD - if not username or not password: + super_user = get_user_by_username(db, username) + if not super_user: raise HTTPException( 
status_code=status.HTTP_400_BAD_REQUEST, - detail="Missing first superuser credentials", + detail="Super user hasn't been created" ) - super_user = create_super_user(db=db, username=username, password=password) - access_token_expires_longterm = timedelta(days=365) access_token = create_token( data={"sub": str(super_user.id)}, diff --git a/src/backend/base/langflow/services/cache/factory.py b/src/backend/base/langflow/services/cache/factory.py index a64b47f3a..b04eb6417 100644 --- a/src/backend/base/langflow/services/cache/factory.py +++ b/src/backend/base/langflow/services/cache/factory.py @@ -16,14 +16,14 @@ class CacheServiceFactory(ServiceFactory): # Here you would have logic to create and configure a CacheService # based on the settings_service - if settings_service.settings.CACHE_TYPE == "redis": + if settings_service.settings.cache_type == "redis": logger.debug("Creating Redis cache") redis_cache = RedisCache( - host=settings_service.settings.REDIS_HOST, - port=settings_service.settings.REDIS_PORT, - db=settings_service.settings.REDIS_DB, - url=settings_service.settings.REDIS_URL, - expiration_time=settings_service.settings.REDIS_CACHE_EXPIRE, + host=settings_service.settings.redis_host, + port=settings_service.settings.redis_port, + db=settings_service.settings.redis_db, + url=settings_service.settings.redis_url, + expiration_time=settings_service.settings.redis_cache_expire, ) if redis_cache.is_connected(): logger.debug("Redis cache is connected") @@ -31,7 +31,7 @@ class CacheServiceFactory(ServiceFactory): logger.warning("Redis cache is not connected, falling back to in-memory cache") return ThreadingInMemoryCache() - elif settings_service.settings.CACHE_TYPE == "memory": + elif settings_service.settings.cache_type == "memory": return ThreadingInMemoryCache() - elif settings_service.settings.CACHE_TYPE == "async": + elif settings_service.settings.cache_type == "async": return AsyncInMemoryCache() diff --git 
a/src/backend/base/langflow/services/cache/service.py b/src/backend/base/langflow/services/cache/service.py index 4ae4dc540..2aa187b22 100644 --- a/src/backend/base/langflow/services/cache/service.py +++ b/src/backend/base/langflow/services/cache/service.py @@ -9,6 +9,9 @@ from loguru import logger from langflow.services.base import Service from langflow.services.cache.base import AsyncBaseCacheService, CacheService +from langflow.services.cache.utils import CacheMiss + +CACHE_MISS = CacheMiss() class ThreadingInMemoryCache(CacheService, Service): @@ -341,12 +344,14 @@ class AsyncInMemoryCache(AsyncBaseCacheService, Service): async def _get(self, key): item = self.cache.get(key, None) - if item and (time.time() - item["time"] < self.expiration_time): - self.cache.move_to_end(key) - return pickle.loads(item["value"]) if isinstance(item["value"], bytes) else item["value"] if item: - await self.delete(key) - return None + if time.time() - item["time"] < self.expiration_time: + self.cache.move_to_end(key) + return pickle.loads(item["value"]) if isinstance(item["value"], bytes) else item["value"] + else: + logger.info(f"Cache item for key '{key}' has expired and will be deleted.") + await self.delete(key) # Log before deleting the expired item + return CACHE_MISS async def set(self, key, value, lock: Optional[asyncio.Lock] = None): if not lock: diff --git a/src/backend/base/langflow/services/cache/utils.py b/src/backend/base/langflow/services/cache/utils.py index 129e9a6b7..ff19836ef 100644 --- a/src/backend/base/langflow/services/cache/utils.py +++ b/src/backend/base/langflow/services/cache/utils.py @@ -19,6 +19,11 @@ CACHE_DIR = user_cache_dir("langflow", "langflow") PREFIX = "langflow_cache" +class CacheMiss: + def __repr__(self): + return "" + + def create_cache_folder(func): def wrapper(*args, **kwargs): # Get the destination folder diff --git a/src/backend/base/langflow/services/chat/service.py b/src/backend/base/langflow/services/chat/service.py index 
072920418..042a541a3 100644 --- a/src/backend/base/langflow/services/chat/service.py +++ b/src/backend/base/langflow/services/chat/service.py @@ -13,7 +13,7 @@ class ChatService(Service): self._cache_locks = defaultdict(asyncio.Lock) self.cache_service = get_cache_service() - async def set_cache(self, flow_id: str, data: Any, lock: Optional[asyncio.Lock] = None) -> bool: + async def set_cache(self, key: str, data: Any, lock: Optional[asyncio.Lock] = None) -> bool: """ Set the cache for a client. """ @@ -23,17 +23,17 @@ class ChatService(Service): "result": data, "type": type(data), } - await self.cache_service.upsert(flow_id, result_dict, lock=lock or self._cache_locks[flow_id]) - return flow_id in self.cache_service + await self.cache_service.upsert(key, result_dict, lock=lock or self._cache_locks[key]) + return key in self.cache_service - async def get_cache(self, flow_id: str, lock: Optional[asyncio.Lock] = None) -> Any: + async def get_cache(self, key: str, lock: Optional[asyncio.Lock] = None) -> Any: """ Get the cache for a client. """ - return await self.cache_service.get(flow_id, lock=lock or self._cache_locks[flow_id]) + return await self.cache_service.get(key, lock=lock or self._cache_locks[key]) - async def clear_cache(self, flow_id: str, lock: Optional[asyncio.Lock] = None): + async def clear_cache(self, key: str, lock: Optional[asyncio.Lock] = None): """ Clear the cache for a client. 
""" - await self.cache_service.delete(flow_id, lock=lock or self._cache_locks[flow_id]) + await self.cache_service.delete(key, lock=lock or self._cache_locks[key]) diff --git a/src/backend/base/langflow/services/chat/utils.py b/src/backend/base/langflow/services/chat/utils.py deleted file mode 100644 index 271c0e85b..000000000 --- a/src/backend/base/langflow/services/chat/utils.py +++ /dev/null @@ -1,53 +0,0 @@ -from typing import Any - -from langchain.agents import AgentExecutor -from langchain.chains.base import Chain -from langchain_core.runnables import Runnable -from loguru import logger - -from langflow.api.v1.schemas import ChatMessage -from langflow.interface.utils import try_setting_streaming_options -from langflow.processing.base import get_result_and_steps - -LANGCHAIN_RUNNABLES = (Chain, Runnable, AgentExecutor) - - -async def process_graph( - build_result, - chat_inputs: ChatMessage, - client_id: str, - session_id: str, -): - build_result = try_setting_streaming_options(build_result) - logger.debug("Loaded langchain object") - - if build_result is None: - # Raise user facing error - raise ValueError("There was an error loading the langchain_object. 
Please, check all the nodes and try again.") - - # Generate result and thought - try: - if chat_inputs.message is None: - logger.debug("No message provided") - chat_inputs.message = {} - - logger.debug("Generating result and thought") - if isinstance(build_result, LANGCHAIN_RUNNABLES): - result, intermediate_steps, raw_output = await get_result_and_steps( - build_result, - chat_inputs.message, - client_id=client_id, - session_id=session_id, - ) - else: - raise TypeError(f"Unknown type {type(build_result)}") - logger.debug("Generated result and intermediate_steps") - return result, intermediate_steps, raw_output - except Exception as e: - # Log stack trace - logger.exception(e) - raise e - - -async def run_build_result(build_result: Any, chat_inputs: ChatMessage, client_id: str, session_id: str): - return build_result(inputs=chat_inputs.message) diff --git a/src/backend/base/langflow/services/database/factory.py b/src/backend/base/langflow/services/database/factory.py index 3b03da131..7f7a142b5 100644 --- a/src/backend/base/langflow/services/database/factory.py +++ b/src/backend/base/langflow/services/database/factory.py @@ -1,6 +1,5 @@ from typing import TYPE_CHECKING - from langflow.services.database.service import DatabaseService from langflow.services.factory import ServiceFactory @@ -14,6 +13,6 @@ class DatabaseServiceFactory(ServiceFactory): def create(self, settings_service: "SettingsService"): # Here you would have logic to create and configure a DatabaseService - if not settings_service.settings.DATABASE_URL: + if not settings_service.settings.database_url: raise ValueError("No database URL provided") - return DatabaseService(settings_service.settings.DATABASE_URL) + return DatabaseService(settings_service.settings.database_url) diff --git a/src/backend/base/langflow/services/database/models/api_key/model.py b/src/backend/base/langflow/services/database/models/api_key/model.py index be4e3ed4d..cb216d9ae 100644 --- 
a/src/backend/base/langflow/services/database/models/api_key/model.py +++ b/src/backend/base/langflow/services/database/models/api_key/model.py @@ -2,7 +2,7 @@ from datetime import datetime, timezone from typing import TYPE_CHECKING, Optional from uuid import UUID, uuid4 -from pydantic import field_validator, validator +from pydantic import field_validator from sqlmodel import Column, DateTime, Field, Relationship, SQLModel, func if TYPE_CHECKING: @@ -40,6 +40,7 @@ class ApiKeyCreate(ApiKeyBase): created_at: Optional[datetime] = Field(default_factory=utc_now) @field_validator("created_at", mode="before") + @classmethod def set_created_at(cls, v): return v or utc_now() @@ -52,10 +53,11 @@ class UnmaskedApiKeyRead(ApiKeyBase): class ApiKeyRead(ApiKeyBase): id: UUID - api_key: str = Field() + api_key: str = Field(schema_extra={"validate_default": True}) user_id: UUID = Field() - @validator("api_key", always=True) + @field_validator("api_key") + @classmethod def mask_api_key(cls, v): # This validator will always run, and will mask the API key return f"{v[:8]}{'*' * (len(v) - 8)}" diff --git a/src/backend/base/langflow/services/database/models/flow/model.py b/src/backend/base/langflow/services/database/models/flow/model.py index 17b5e8931..4de1e0bc8 100644 --- a/src/backend/base/langflow/services/database/models/flow/model.py +++ b/src/backend/base/langflow/services/database/models/flow/model.py @@ -1,5 +1,6 @@ # Path: src/backend/langflow/services/database/models/flow/model.py +import re import warnings from datetime import datetime, timezone from typing import TYPE_CHECKING, Dict, Optional @@ -7,7 +8,9 @@ from uuid import UUID, uuid4 import emoji from emoji import purely_emoji # type: ignore +from fastapi import HTTPException, status from pydantic import field_serializer, field_validator +from sqlalchemy import UniqueConstraint from sqlmodel import JSON, Column, Field, Relationship, SQLModel from langflow.schema.schema import Record @@ -25,7 +28,26 @@ class 
FlowBase(SQLModel): data: Optional[Dict] = Field(default=None, nullable=True) is_component: Optional[bool] = Field(default=False, nullable=True) updated_at: Optional[datetime] = Field(default_factory=lambda: datetime.now(timezone.utc), nullable=True) + webhook: Optional[bool] = Field(default=False, nullable=True, description="Can be used on the webhook endpoint") folder_id: Optional[UUID] = Field(default=None, nullable=True) + endpoint_name: Optional[str] = Field(default=None, nullable=True, index=True) + + @field_validator("endpoint_name") + @classmethod + def validate_endpoint_name(cls, v): + # Endpoint name must be a string containing only letters, numbers, hyphens, and underscores + if v is not None: + if not isinstance(v, str): + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail="Endpoint name must be a string", + ) + if not re.match(r"^[a-zA-Z0-9_-]+$", v): + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail="Endpoint name must contain only letters, numbers, hyphens, and underscores", + ) + return v @field_validator("icon_bg_color") def validate_icon_bg_color(cls, v): @@ -93,10 +115,15 @@ class FlowBase(SQLModel): # updated_at can be serialized to JSON @field_serializer("updated_at") - def serialize_dt(self, dt: datetime, _info): - if dt is None: - return None - return dt.isoformat() + def serialize_datetime(value): + if isinstance(value, datetime): + # I'm getting 2024-05-29T17:57:17.631346 + # and I want 2024-05-29T17:57:17-05:00 + value = value.replace(microsecond=0) + if value.tzinfo is None: + value = value.replace(tzinfo=timezone.utc) + return value.isoformat() + return value @field_validator("updated_at", mode="before") def validate_dt(cls, v): @@ -128,6 +155,11 @@ class Flow(FlowBase, table=True): record = Record(data=data) return record + __table_args__ = ( + UniqueConstraint("user_id", "name", name="unique_flow_name"), + UniqueConstraint("user_id", "endpoint_name", 
name="unique_flow_endpoint_name"), + ) + class FlowCreate(FlowBase): user_id: Optional[UUID] = None @@ -145,3 +177,21 @@ class FlowUpdate(SQLModel): description: Optional[str] = None data: Optional[Dict] = None folder_id: Optional[UUID] = None + endpoint_name: Optional[str] = None + + @field_validator("endpoint_name") + @classmethod + def validate_endpoint_name(cls, v): + # Endpoint name must be a string containing only letters, numbers, hyphens, and underscores + if v is not None: + if not isinstance(v, str): + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail="Endpoint name must be a string", + ) + if not re.match(r"^[a-zA-Z0-9_-]+$", v): + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail="Endpoint name must contain only letters, numbers, hyphens, and underscores", + ) + return v diff --git a/src/backend/base/langflow/services/database/models/flow/utils.py b/src/backend/base/langflow/services/database/models/flow/utils.py new file mode 100644 index 000000000..b8ea9d658 --- /dev/null +++ b/src/backend/base/langflow/services/database/models/flow/utils.py @@ -0,0 +1,33 @@ +from typing import Optional + +from fastapi import Depends +from sqlmodel import Session + +from langflow.services.deps import get_session + +from .model import Flow + + +def get_flow_by_id(session: Session = Depends(get_session), flow_id: Optional[str] = None) -> Flow | None: + """Get flow by id.""" + + if flow_id is None: + raise ValueError("Flow id is required.") + + return session.get(Flow, flow_id) + + +def get_webhook_component_in_flow(flow_data: dict): + """Get webhook component in flow data.""" + + for node in flow_data.get("nodes", []): + if "Webhook" in node.get("id"): + return node + return None + + +def get_all_webhook_components_in_flow(flow_data: dict | None): + """Get all webhook components in flow data.""" + if not flow_data: + return [] + return [node for node in flow_data.get("nodes", []) if "Webhook" in 
node.get("id")] diff --git a/src/backend/base/langflow/services/database/models/folder/model.py b/src/backend/base/langflow/services/database/models/folder/model.py index 6ce038c63..dc2dfaa80 100644 --- a/src/backend/base/langflow/services/database/models/folder/model.py +++ b/src/backend/base/langflow/services/database/models/folder/model.py @@ -1,6 +1,7 @@ from typing import TYPE_CHECKING, List, Optional from uuid import UUID, uuid4 +from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel from langflow.services.database.models.flow.model import FlowRead @@ -30,6 +31,8 @@ class Folder(FolderBase, table=True): back_populates="folder", sa_relationship_kwargs={"cascade": "all, delete, delete-orphan"} ) + __table_args__ = (UniqueConstraint("user_id", "name", name="unique_folder_name"),) + class FolderCreate(FolderBase): components_list: Optional[List[UUID]] = None diff --git a/src/backend/base/langflow/services/database/service.py b/src/backend/base/langflow/services/database/service.py index 14c79f85b..674c6c645 100644 --- a/src/backend/base/langflow/services/database/service.py +++ b/src/backend/base/langflow/services/database/service.py @@ -37,7 +37,7 @@ class DatabaseService(Service): def _create_engine(self) -> "Engine": """Create the engine for the database.""" settings_service = get_settings_service() - if settings_service.settings.DATABASE_URL and settings_service.settings.DATABASE_URL.startswith("sqlite"): + if settings_service.settings.database_url and settings_service.settings.database_url.startswith("sqlite"): connect_args = {"check_same_thread": False} else: connect_args = {} diff --git a/src/backend/base/langflow/services/monitor/utils.py b/src/backend/base/langflow/services/monitor/utils.py index aec5ae0c6..f603b3fde 100644 --- a/src/backend/base/langflow/services/monitor/utils.py +++ b/src/backend/base/langflow/services/monitor/utils.py @@ -8,6 +8,7 @@ from langflow.services.deps import get_monitor_service if 
TYPE_CHECKING: from langflow.api.v1.schemas import ResultDataResponse + from langflow.graph.vertex.base import Vertex INDEX_KEY = "index" @@ -165,3 +166,35 @@ async def log_vertex_build( monitor_service.add_row(table_name="vertex_builds", data=row) except Exception as e: logger.exception(f"Error logging vertex build: {e}") + + +def build_clean_params(target: "Vertex") -> dict: + """ + Cleans the parameters of the target vertex. + """ + # Removes all keys that the values aren't python types like str, int, bool, etc. + params = { + key: value for key, value in target.params.items() if isinstance(value, (str, int, bool, float, list, dict)) + } + # if it is a list we need to check if the contents are python types + for key, value in params.items(): + if isinstance(value, list): + params[key] = [item for item in value if isinstance(item, (str, int, bool, float, list, dict))] + return params + + +def log_transaction(vertex: "Vertex", status, error=None): + try: + monitor_service = get_monitor_service() + clean_params = build_clean_params(vertex) + data = { + "vertex_id": vertex.id, + "inputs": clean_params, + "output": str(vertex.result), + "timestamp": monitor_service.get_timestamp(), + "status": status, + "error": error, + } + monitor_service.add_row(table_name="transactions", data=data) + except Exception as e: + logger.error(f"Error logging transaction: {e}") diff --git a/src/backend/base/langflow/services/plugins/langfuse_plugin.py b/src/backend/base/langflow/services/plugins/langfuse_plugin.py index e6d37d3c5..ffc8139f3 100644 --- a/src/backend/base/langflow/services/plugins/langfuse_plugin.py +++ b/src/backend/base/langflow/services/plugins/langfuse_plugin.py @@ -24,12 +24,12 @@ class LangfuseInstance: settings_manager = get_settings_service() - if settings_manager.settings.LANGFUSE_PUBLIC_KEY and settings_manager.settings.LANGFUSE_SECRET_KEY: + if settings_manager.settings.langfuse_public_key and settings_manager.settings.langfuse_secret_key: 
logger.debug("Langfuse credentials found") cls._instance = Langfuse( - public_key=settings_manager.settings.LANGFUSE_PUBLIC_KEY, - secret_key=settings_manager.settings.LANGFUSE_SECRET_KEY, - host=settings_manager.settings.LANGFUSE_HOST, + public_key=settings_manager.settings.langfuse_public_key, + secret_key=settings_manager.settings.langfuse_secret_key, + host=settings_manager.settings.langfuse_host, ) else: logger.debug("No Langfuse credentials found") diff --git a/src/backend/base/langflow/services/settings/auth.py b/src/backend/base/langflow/services/settings/auth.py index 103c96a40..8e321ed19 100644 --- a/src/backend/base/langflow/services/settings/auth.py +++ b/src/backend/base/langflow/services/settings/auth.py @@ -4,7 +4,7 @@ from typing import Literal from loguru import logger from passlib.context import CryptContext -from pydantic import Field, SecretStr, validator +from pydantic import Field, SecretStr, field_validator from pydantic_settings import BaseSettings from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD @@ -47,6 +47,9 @@ class AuthSettings(BaseSettings): ACCESS_HTTPONLY: bool = False """The HttpOnly attribute of the access token cookie.""" + COOKIE_DOMAIN: str | None = None + """The domain attribute of the cookies. 
If None, the domain is not set.""" + pwd_context: CryptContext = CryptContext(schemes=["bcrypt"], deprecated="auto") class Config: @@ -62,23 +65,25 @@ class AuthSettings(BaseSettings): # the default values # so we need to validate the superuser and superuser_password # fields - @validator("SUPERUSER", "SUPERUSER_PASSWORD", pre=True) - def validate_superuser(cls, value, values): - if values.get("AUTO_LOGIN"): + @field_validator("SUPERUSER", "SUPERUSER_PASSWORD", mode="before") + @classmethod + def validate_superuser(cls, value, info): + if info.data.get("AUTO_LOGIN"): if value != DEFAULT_SUPERUSER: value = DEFAULT_SUPERUSER logger.debug("Resetting superuser to default value") - if values.get("SUPERUSER_PASSWORD") != DEFAULT_SUPERUSER_PASSWORD: - values["SUPERUSER_PASSWORD"] = DEFAULT_SUPERUSER_PASSWORD + if info.data.get("SUPERUSER_PASSWORD") != DEFAULT_SUPERUSER_PASSWORD: + info.data["SUPERUSER_PASSWORD"] = DEFAULT_SUPERUSER_PASSWORD logger.debug("Resetting superuser password to default value") return value return value - @validator("SECRET_KEY", pre=True) - def get_secret_key(cls, value, values): - config_dir = values.get("CONFIG_DIR") + @field_validator("SECRET_KEY", mode="before") + @classmethod + def get_secret_key(cls, value, info): + config_dir = info.data.get("CONFIG_DIR") if not config_dir: logger.debug("No CONFIG_DIR provided, not saving secret key") diff --git a/src/backend/base/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py index 97abf2d2b..0f9d0d029 100644 --- a/src/backend/base/langflow/services/settings/base.py +++ b/src/backend/base/langflow/services/settings/base.py @@ -7,12 +7,13 @@ from typing import Any, List, Optional, Tuple, Type import orjson import yaml -from langflow.services.settings.constants import VARIABLES_TO_GET_FROM_ENVIRONMENT from loguru import logger from pydantic import field_validator from pydantic.fields import FieldInfo from pydantic_settings import BaseSettings, EnvSettingsSource, 
PydanticBaseSettingsSource, SettingsConfigDict +from langflow.services.settings.constants import VARIABLES_TO_GET_FROM_ENVIRONMENT + # BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components") BASE_COMPONENTS_PATH = str(Path(__file__).parent.parent.parent / "components") @@ -57,59 +58,44 @@ class MyCustomSource(EnvSettingsSource): class Settings(BaseSettings): - CHAINS: dict = {} - AGENTS: dict = {} - PROMPTS: dict = {} - LLMS: dict = {} - TOOLS: dict = {} - MEMORIES: dict = {} - EMBEDDINGS: dict = {} - VECTORSTORES: dict = {} - DOCUMENTLOADERS: dict = {} - WRAPPERS: dict = {} - RETRIEVERS: dict = {} - TOOLKITS: dict = {} - TEXTSPLITTERS: dict = {} - UTILITIES: dict = {} - CUSTOM_COMPONENTS: dict = {} - # Define the default LANGFLOW_DIR - CONFIG_DIR: Optional[str] = None + config_dir: Optional[str] = None # Define if langflow db should be saved in config dir or # in the langflow directory - SAVE_DB_IN_CONFIG_DIR: bool = False + save_db_in_config_dir: bool = False """Define if langflow database should be saved in LANGFLOW_CONFIG_DIR or in the langflow directory (i.e. 
in the package directory).""" - DEV: bool = False - DATABASE_URL: Optional[str] = None - CACHE_TYPE: str = "async" - REMOVE_API_KEYS: bool = False - COMPONENTS_PATH: List[str] = [] - LANGCHAIN_CACHE: str = "InMemoryCache" + dev: bool = False + database_url: Optional[str] = None + cache_type: str = "async" + remove_api_keys: bool = False + components_path: List[str] = [] + langchain_cache: str = "InMemoryCache" + load_flows_path: Optional[str] = None # Redis - REDIS_HOST: str = "localhost" - REDIS_PORT: int = 6379 - REDIS_DB: int = 0 - REDIS_URL: Optional[str] = None - REDIS_CACHE_EXPIRE: int = 3600 + redis_host: str = "localhost" + redis_port: int = 6379 + redis_db: int = 0 + redis_url: Optional[str] = None + redis_cache_expire: int = 3600 # PLUGIN_DIR: Optional[str] = None - LANGFUSE_SECRET_KEY: Optional[str] = None - LANGFUSE_PUBLIC_KEY: Optional[str] = None - LANGFUSE_HOST: Optional[str] = None + langfuse_secret_key: Optional[str] = None + langfuse_public_key: Optional[str] = None + langfuse_host: Optional[str] = None - STORE: Optional[bool] = True - STORE_URL: Optional[str] = "https://api.langflow.store" - DOWNLOAD_WEBHOOK_URL: Optional[str] = ( + store: Optional[bool] = True + store_url: Optional[str] = "https://api.langflow.store" + download_webhook_url: Optional[str] = ( "https://api.langflow.store/flows/trigger/ec611a61-8460-4438-b187-a4f65e5559d4" ) - LIKE_WEBHOOK_URL: Optional[str] = "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da" + like_webhook_url: Optional[str] = "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da" - STORAGE_TYPE: str = "local" + storage_type: str = "local" - CELERY_ENABLED: bool = False + celery_enabled: bool = False fallback_to_env_var: bool = True """If set to True, Global Variables set in the UI will fallback to a environment variable @@ -119,8 +105,12 @@ class Settings(BaseSettings): """Whether to store environment variables as Global Variables in the database.""" 
variables_to_get_from_environment: list[str] = VARIABLES_TO_GET_FROM_ENVIRONMENT """List of environment variables to get from the environment and store in the database.""" + worker_timeout: int = 300 + """Timeout for the API calls in seconds.""" + frontend_timeout: int = 0 + """Timeout for the frontend API calls in seconds.""" - @field_validator("CONFIG_DIR", mode="before") + @field_validator("config_dir", mode="before") def set_langflow_dir(cls, value): if not value: from platformdirs import user_cache_dir @@ -143,7 +133,7 @@ class Settings(BaseSettings): return str(value) - @field_validator("DATABASE_URL", mode="before") + @field_validator("database_url", mode="before") def set_database_url(cls, value, info): if not value: logger.debug("No database_url provided, trying LANGFLOW_DATABASE_URL env variable") @@ -151,17 +141,23 @@ class Settings(BaseSettings): value = langflow_database_url logger.debug("Using LANGFLOW_DATABASE_URL env variable.") else: - logger.debug("No DATABASE_URL env variable, using sqlite database") + logger.debug("No database_url env variable, using sqlite database") # Originally, we used sqlite:///./langflow.db # so we need to migrate to the new format # if there is a database in that location - if not info.data["CONFIG_DIR"]: - raise ValueError("CONFIG_DIR not set, please set it or provide a DATABASE_URL") - from langflow.version import is_pre_release # type: ignore + if not info.data["config_dir"]: + raise ValueError("config_dir not set, please set it or provide a database_url") + try: + from langflow.version import is_pre_release # type: ignore + except ImportError: + from importlib import metadata - if info.data["SAVE_DB_IN_CONFIG_DIR"]: - database_dir = info.data["CONFIG_DIR"] - logger.debug(f"Saving database to CONFIG_DIR: {database_dir}") + version = metadata.version("langflow-base") + is_pre_release = "a" in version or "b" in version or "rc" in version + + if info.data["save_db_in_config_dir"]: + database_dir = info.data["config_dir"] 
+ logger.debug(f"Saving database to config_dir: {database_dir}") else: database_dir = Path(__file__).parent.parent.parent.resolve() logger.debug(f"Saving database to langflow directory: {database_dir}") @@ -174,12 +170,12 @@ class Settings(BaseSettings): if is_pre_release: if Path(new_pre_path).exists(): final_path = new_pre_path - elif Path(new_path).exists() and info.data["SAVE_DB_IN_CONFIG_DIR"]: + elif Path(new_path).exists() and info.data["save_db_in_config_dir"]: # We need to copy the current db to the new location logger.debug("Copying existing database to new location") copy2(new_path, new_pre_path) logger.debug(f"Copied existing database to {new_pre_path}") - elif Path(f"./{db_file_name}").exists() and info.data["SAVE_DB_IN_CONFIG_DIR"]: + elif Path(f"./{db_file_name}").exists() and info.data["save_db_in_config_dir"]: logger.debug("Copying existing database to new location") copy2(f"./{db_file_name}", new_pre_path) logger.debug(f"Copied existing database to {new_pre_path}") @@ -211,7 +207,7 @@ class Settings(BaseSettings): return value - @field_validator("COMPONENTS_PATH", mode="before") + @field_validator("components_path", mode="before") def set_components_path(cls, value): if os.getenv("LANGFLOW_COMPONENTS_PATH"): logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path") @@ -240,23 +236,8 @@ class Settings(BaseSettings): def update_from_yaml(self, file_path: str, dev: bool = False): new_settings = load_settings_from_yaml(file_path) - self.CHAINS = new_settings.CHAINS or {} - self.AGENTS = new_settings.AGENTS or {} - self.PROMPTS = new_settings.PROMPTS or {} - self.LLMS = new_settings.LLMS or {} - self.TOOLS = new_settings.TOOLS or {} - self.MEMORIES = new_settings.MEMORIES or {} - self.WRAPPERS = new_settings.WRAPPERS or {} - self.TOOLKITS = new_settings.TOOLKITS or {} - self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {} - self.UTILITIES = new_settings.UTILITIES or {} - self.EMBEDDINGS = new_settings.EMBEDDINGS or {} - self.VECTORSTORES = 
new_settings.VECTORSTORES or {} - self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {} - self.RETRIEVERS = new_settings.RETRIEVERS or {} - self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {} - self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or [] - self.DEV = dev + self.components_path = new_settings.components_path or [] + self.dev = dev def update_settings(self, **kwargs): logger.debug("Updating settings") @@ -325,6 +306,3 @@ def load_settings_from_yaml(file_path: str) -> Settings: logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") return Settings(**settings_dict) - return Settings(**settings_dict) - return Settings(**settings_dict) - return Settings(**settings_dict) diff --git a/src/backend/base/langflow/services/settings/constants.py b/src/backend/base/langflow/services/settings/constants.py index 37c2f1db7..256030183 100644 --- a/src/backend/base/langflow/services/settings/constants.py +++ b/src/backend/base/langflow/services/settings/constants.py @@ -17,6 +17,8 @@ VARIABLES_TO_GET_FROM_ENVIRONMENT = [ "PINECONE_API_KEY", "SEARCHAPI_API_KEY", "SERPAPI_API_KEY", + "UPSTASH_VECTOR_REST_URL", + "UPSTASH_VECTOR_REST_TOKEN", "VECTARA_CUSTOMER_ID", "VECTARA_CORPUS_ID", "VECTARA_API_KEY", diff --git a/src/backend/base/langflow/services/settings/manager.py b/src/backend/base/langflow/services/settings/manager.py index f81c3f0c5..d7d2184f3 100644 --- a/src/backend/base/langflow/services/settings/manager.py +++ b/src/backend/base/langflow/services/settings/manager.py @@ -35,10 +35,10 @@ class SettingsService(Service): logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") settings = Settings(**settings_dict) - if not settings.CONFIG_DIR: + if not settings.config_dir: raise ValueError("CONFIG_DIR must be set in settings") auth_settings = AuthSettings( - CONFIG_DIR=settings.CONFIG_DIR, + CONFIG_DIR=settings.config_dir, ) return cls(settings, auth_settings) diff --git 
a/src/backend/base/langflow/services/settings/service.py b/src/backend/base/langflow/services/settings/service.py index 160c266ec..95088e829 100644 --- a/src/backend/base/langflow/services/settings/service.py +++ b/src/backend/base/langflow/services/settings/service.py @@ -1,11 +1,12 @@ import os +from typing import Optional import yaml +from loguru import logger + from langflow.services.base import Service from langflow.services.settings.auth import AuthSettings from langflow.services.settings.base import Settings -from loguru import logger - class SettingsService(Service): name = "settings_service" @@ -26,7 +27,6 @@ class SettingsService(Service): with open(file_path, "r") as f: settings_dict = yaml.safe_load(f) - settings_dict = {k.upper(): v for k, v in settings_dict.items()} for key in settings_dict: if key not in Settings.model_fields.keys(): @@ -34,10 +34,14 @@ class SettingsService(Service): logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") settings = Settings(**settings_dict) - if not settings.CONFIG_DIR: + if not settings.config_dir: raise ValueError("CONFIG_DIR must be set in settings") auth_settings = AuthSettings( - CONFIG_DIR=settings.CONFIG_DIR, + CONFIG_DIR=settings.config_dir, ) return cls(settings, auth_settings) + + def set(self, key, value): + setattr(self.settings, key, value) + return self.settings diff --git a/src/backend/base/langflow/services/storage/factory.py b/src/backend/base/langflow/services/storage/factory.py index 1b2baf050..ae4783f1e 100644 --- a/src/backend/base/langflow/services/storage/factory.py +++ b/src/backend/base/langflow/services/storage/factory.py @@ -13,7 +13,7 @@ class StorageServiceFactory(ServiceFactory): ) def create(self, session_service: SessionService, settings_service: SettingsService): - storage_type = settings_service.settings.STORAGE_TYPE + storage_type = settings_service.settings.storage_type if storage_type.lower() == "local": from .local import LocalStorageService diff --git 
a/src/backend/base/langflow/services/storage/local.py b/src/backend/base/langflow/services/storage/local.py index 815059857..9ad9feafb 100644 --- a/src/backend/base/langflow/services/storage/local.py +++ b/src/backend/base/langflow/services/storage/local.py @@ -11,7 +11,7 @@ class LocalStorageService(StorageService): def __init__(self, session_service, settings_service): """Initialize the local storage service with session and settings services.""" super().__init__(session_service, settings_service) - self.data_dir = Path(settings_service.settings.CONFIG_DIR) + self.data_dir = Path(settings_service.settings.config_dir) self.set_ready() def build_full_path(self, flow_id: str, file_name: str) -> str: diff --git a/src/backend/base/langflow/services/store/schema.py b/src/backend/base/langflow/services/store/schema.py index 0fe89de18..0c37e1166 100644 --- a/src/backend/base/langflow/services/store/schema.py +++ b/src/backend/base/langflow/services/store/schema.py @@ -1,7 +1,7 @@ from typing import List, Optional from uuid import UUID -from pydantic import BaseModel, validator +from pydantic import BaseModel, field_validator class TagResponse(BaseModel): @@ -37,7 +37,8 @@ class ListComponentResponse(BaseModel): private: Optional[bool] = None # tags comes as a TagsIdResponse but we want to return a list of TagResponse - @validator("tags", pre=True) + @field_validator("tags", mode="before") + @classmethod def tags_to_list(cls, v): # Check if all values are have id and name # if so, return v else transform to TagResponse diff --git a/src/backend/base/langflow/services/store/service.py b/src/backend/base/langflow/services/store/service.py index 4fabd435c..a1b221b63 100644 --- a/src/backend/base/langflow/services/store/service.py +++ b/src/backend/base/langflow/services/store/service.py @@ -79,9 +79,9 @@ class StoreService(Service): def __init__(self, settings_service: "SettingsService"): self.settings_service = settings_service - self.base_url = 
self.settings_service.settings.STORE_URL - self.download_webhook_url = self.settings_service.settings.DOWNLOAD_WEBHOOK_URL - self.like_webhook_url = self.settings_service.settings.LIKE_WEBHOOK_URL + self.base_url = self.settings_service.settings.store_url + self.download_webhook_url = self.settings_service.settings.download_webhook_url + self.like_webhook_url = self.settings_service.settings.like_webhook_url self.components_url = f"{self.base_url}/items/components" self.default_fields = [ "id", diff --git a/src/backend/base/langflow/services/task/service.py b/src/backend/base/langflow/services/task/service.py index 487b507cd..cca1645b8 100644 --- a/src/backend/base/langflow/services/task/service.py +++ b/src/backend/base/langflow/services/task/service.py @@ -29,7 +29,7 @@ class TaskService(Service): def __init__(self, settings_service: "SettingsService"): self.settings_service = settings_service try: - if self.settings_service.settings.CELERY_ENABLED: + if self.settings_service.settings.celery_enabled: USE_CELERY = True status = check_celery_availability() diff --git a/src/backend/base/langflow/settings.py b/src/backend/base/langflow/settings.py deleted file mode 100644 index 3f340df95..000000000 --- a/src/backend/base/langflow/settings.py +++ /dev/null @@ -1,166 +0,0 @@ -import contextlib -import json -import os -from pathlib import Path -from typing import List, Optional - -import yaml -from pydantic import model_validator, validator -from pydantic_settings import BaseSettings - -from langflow.utils.logger import logger - -BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components") - - -class Settings(BaseSettings): - CHAINS: dict = {} - AGENTS: dict = {} - PROMPTS: dict = {} - LLMS: dict = {} - TOOLS: dict = {} - MEMORIES: dict = {} - EMBEDDINGS: dict = {} - VECTORSTORES: dict = {} - DOCUMENTLOADERS: dict = {} - WRAPPERS: dict = {} - RETRIEVERS: dict = {} - TOOLKITS: dict = {} - TEXTSPLITTERS: dict = {} - UTILITIES: dict = {} - CUSTOM_COMPONENTS: dict = {} 
- - DEV: bool = False - DATABASE_URL: Optional[str] = None - CACHE: str = "InMemoryCache" - REMOVE_API_KEYS: bool = False - COMPONENTS_PATH: List[str] = [] - - @validator("DATABASE_URL", pre=True) - def set_database_url(cls, value): - if not value: - logger.debug("No database_url provided, trying LANGFLOW_DATABASE_URL env variable") - if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): - value = langflow_database_url - logger.debug("Using LANGFLOW_DATABASE_URL env variable.") - else: - logger.debug("No DATABASE_URL env variable, using sqlite database") - value = "sqlite:///./langflow.db" - return value - - @validator("COMPONENTS_PATH", pre=True) - def set_components_path(cls, value): - if os.getenv("LANGFLOW_COMPONENTS_PATH"): - logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path") - langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH") - if Path(langflow_component_path).exists() and langflow_component_path not in value: - if isinstance(langflow_component_path, list): - for path in langflow_component_path: - if path not in value: - value.append(path) - logger.debug(f"Extending {langflow_component_path} to components_path") - elif langflow_component_path not in value: - value.append(langflow_component_path) - logger.debug(f"Appending {langflow_component_path} to components_path") - - if not value: - value = [BASE_COMPONENTS_PATH] - logger.debug("Setting default components path to components_path") - elif BASE_COMPONENTS_PATH not in value: - value.append(BASE_COMPONENTS_PATH) - logger.debug("Adding default components path to components_path") - - logger.debug(f"Components path: {value}") - return value - - class Config: - validate_assignment = True - extra = "ignore" - env_prefix = "LANGFLOW_" - - @model_validator(mode="after") - def validate_lists(cls, values): - for key, value in values.items(): - if key != "dev" and not value: - values[key] = [] - return values - - def update_from_yaml(self, file_path: str, dev: bool = False): - 
new_settings = load_settings_from_yaml(file_path) - self.CHAINS = new_settings.CHAINS or {} - self.AGENTS = new_settings.AGENTS or {} - self.PROMPTS = new_settings.PROMPTS or {} - self.LLMS = new_settings.LLMS or {} - self.TOOLS = new_settings.TOOLS or {} - self.MEMORIES = new_settings.MEMORIES or {} - self.WRAPPERS = new_settings.WRAPPERS or {} - self.TOOLKITS = new_settings.TOOLKITS or {} - self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {} - self.UTILITIES = new_settings.UTILITIES or {} - self.EMBEDDINGS = new_settings.EMBEDDINGS or {} - self.VECTORSTORES = new_settings.VECTORSTORES or {} - self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {} - self.RETRIEVERS = new_settings.RETRIEVERS or {} - self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {} - self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or [] - self.DEV = dev - - def update_settings(self, **kwargs): - logger.debug("Updating settings") - for key, value in kwargs.items(): - # value may contain sensitive information, so we don't want to log it - if not hasattr(self, key): - logger.debug(f"Key {key} not found in settings") - continue - logger.debug(f"Updating {key}") - if isinstance(getattr(self, key), list): - # value might be a '[something]' string - with contextlib.suppress(json.decoder.JSONDecodeError): - value = json.loads(str(value)) - if isinstance(value, list): - for item in value: - if isinstance(item, Path): - item = str(item) - if item not in getattr(self, key): - getattr(self, key).append(item) - logger.debug(f"Extended {key}") - else: - if isinstance(value, Path): - value = str(value) - if value not in getattr(self, key): - getattr(self, key).append(value) - logger.debug(f"Appended {key}") - - else: - setattr(self, key, value) - logger.debug(f"Updated {key}") - logger.debug(f"{key}: {getattr(self, key)}") - - -def save_settings_to_yaml(settings: Settings, file_path: str): - with open(file_path, "w") as f: - settings_dict = settings.model_dump() - yaml.dump(settings_dict, f) 
- - -def load_settings_from_yaml(file_path: str) -> Settings: - # Check if a string is a valid path or a file name - if "/" not in file_path: - # Get current path - current_path = os.path.dirname(os.path.abspath(__file__)) - - file_path = os.path.join(current_path, file_path) - - with open(file_path, "r") as f: - settings_dict = yaml.safe_load(f) - settings_dict = {k.upper(): v for k, v in settings_dict.items()} - - for key in settings_dict: - if key not in Settings.model_fields.keys(): - raise KeyError(f"Key {key} not found in settings") - logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") - - return Settings(**settings_dict) - - -settings = load_settings_from_yaml("config.yaml") diff --git a/src/backend/base/langflow/template/frontend_node/__init__.py b/src/backend/base/langflow/template/frontend_node/__init__.py index ceb2e0cb9..98c6fdb01 100644 --- a/src/backend/base/langflow/template/frontend_node/__init__.py +++ b/src/backend/base/langflow/template/frontend_node/__init__.py @@ -1,29 +1,6 @@ -from langflow.template.frontend_node import ( - agents, - chains, - custom_components, - documentloaders, - embeddings, - llms, - memories, - prompts, - textsplitters, - tools, - vectorstores, - base, -) +from langflow.template.frontend_node import base, custom_components __all__ = [ - "agents", "base", - "chains", - "embeddings", - "memories", - "tools", - "llms", - "prompts", - "vectorstores", - "documentloaders", - "textsplitters", "custom_components", ] diff --git a/src/backend/base/langflow/template/frontend_node/agents.py b/src/backend/base/langflow/template/frontend_node/agents.py deleted file mode 100644 index 0993c1736..000000000 --- a/src/backend/base/langflow/template/frontend_node/agents.py +++ /dev/null @@ -1,172 +0,0 @@ -from typing import Optional - -from langchain.agents import types - - -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode -from 
langflow.template.template.base import Template - -NON_CHAT_AGENTS = { - agent_type: agent_class - for agent_type, agent_class in types.AGENT_TO_CLASS.items() - if "chat" not in agent_type.value -} - - -class AgentFrontendNode(FrontendNode): - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - if field.name in ["suffix", "prefix"]: - field.show = True - if field.name == "Tools" and name == "ZeroShotAgent": - field.field_type = "BaseTool" - field.is_list = True - - -class SQLAgentNode(FrontendNode): - name: str = "SQLAgent" - template: Template = Template( - type_name="sql_agent", - fields=[ - TemplateField( - field_type="str", # pyright: ignore - required=True, - placeholder="", - is_list=False, # pyright: ignore - show=True, - multiline=False, - value="", - name="database_uri", - ), - TemplateField( - field_type="BaseLanguageModel", # pyright: ignore - required=True, - show=True, - name="llm", - display_name="LLM", - ), - ], - ) - description: str = """Construct an SQL agent from an LLM and tools.""" - base_classes: list[str] = ["AgentExecutor"] - - -class VectorStoreRouterAgentNode(FrontendNode): - name: str = "VectorStoreRouterAgent" - template: Template = Template( - type_name="vectorstorerouter_agent", - fields=[ - TemplateField( - field_type="VectorStoreRouterToolkit", # pyright: ignore - required=True, - show=True, - name="vectorstoreroutertoolkit", - display_name="Vector Store Router Toolkit", - ), - TemplateField( - field_type="BaseLanguageModel", # pyright: ignore - required=True, - show=True, - name="llm", - display_name="LLM", - ), - ], - ) - description: str = """Construct an agent from a Vector Store Router.""" - base_classes: list[str] = ["AgentExecutor"] - - -class VectorStoreAgentNode(FrontendNode): - name: str = "VectorStoreAgent" - template: Template = Template( - type_name="vectorstore_agent", - fields=[ - TemplateField( - field_type="VectorStoreInfo", # pyright: ignore - required=True, - show=True, - 
name="vectorstoreinfo", - display_name="Vector Store Info", - ), - TemplateField( - field_type="BaseLanguageModel", # pyright: ignore - required=True, - show=True, - name="llm", - display_name="LLM", - ), - ], - ) - description: str = """Construct an agent from a Vector Store.""" - base_classes: list[str] = ["AgentExecutor"] - - -class SQLDatabaseNode(FrontendNode): - name: str = "SQLDatabase" - template: Template = Template( - type_name="sql_database", - fields=[ - TemplateField( - field_type="str", # pyright: ignore - required=True, - is_list=False, # pyright: ignore - show=True, - multiline=False, - value="", - name="uri", - ), - ], - ) - description: str = """SQLAlchemy wrapper around a database.""" - base_classes: list[str] = ["SQLDatabase"] - - -class CSVAgentNode(FrontendNode): - name: str = "CSVAgent" - template: Template = Template( - type_name="csv_agent", - fields=[ - TemplateField( - field_type="file", # pyright: ignore - required=True, - show=True, - name="path", - value="", - file_types=[".csv"], # pyright: ignore - ), - TemplateField( - field_type="BaseLanguageModel", # pyright: ignore - required=True, - show=True, - name="llm", - display_name="LLM", - ), - ], - ) - description: str = """Construct a CSV agent from a CSV and tools.""" - base_classes: list[str] = ["AgentExecutor"] - - -class JsonAgentNode(FrontendNode): - name: str = "JsonAgent" - template: Template = Template( - type_name="json_agent", - fields=[ - TemplateField( - field_type="BaseToolkit", # pyright: ignore - required=True, - show=True, - name="toolkit", - ), - TemplateField( - field_type="BaseLanguageModel", # pyright: ignore - required=True, - show=True, - name="llm", - display_name="LLM", - ), - ], - ) - description: str = """Construct a json agent from an LLM and tools.""" - base_classes: list[str] = ["AgentExecutor"] diff --git a/src/backend/base/langflow/template/frontend_node/chains.py b/src/backend/base/langflow/template/frontend_node/chains.py deleted file mode 100644 index 
4ce23a316..000000000 --- a/src/backend/base/langflow/template/frontend_node/chains.py +++ /dev/null @@ -1,265 +0,0 @@ -from typing import Optional - -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode -from langflow.template.frontend_node.constants import QA_CHAIN_TYPES -from langflow.template.template.base import Template - - -class ChainFrontendNode(FrontendNode): - output_type: str = "Chain" - - def add_extra_base_classes(self) -> None: - self.base_classes.append("Text") - - def add_extra_fields(self) -> None: - if self.template.type_name == "ConversationalRetrievalChain": - # add memory - self.template.add_field( - TemplateField( - field_type="BaseChatMemory", - required=True, - show=True, - name="memory", - advanced=False, - ) - ) - # add return_source_documents - self.template.add_field( - TemplateField( - field_type="bool", - required=False, - show=True, - name="return_source_documents", - advanced=False, - value=True, - display_name="Return source documents", - ) - ) - self.template.add_field( - TemplateField( - field_type="str", - required=True, - is_list=True, - show=True, - multiline=False, - options=QA_CHAIN_TYPES, - value=QA_CHAIN_TYPES[0], - name="chain_type", - advanced=False, - ) - ) - - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - - if "name" == "RetrievalQA" and field.name == "memory": - field.show = False - field.required = False - - field.advanced = False - if "key" in str(field.name): - field.password = False - field.show = False - if field.name in ["input_key", "output_key"]: - field.required = True - field.show = True - field.advanced = True - - # We should think of a way to deal with this later - # if field.field_type == "PromptTemplate": - # field.field_type = "str" - # field.multiline = True - # field.show = True - # field.advanced = False - # field.value = field.value.template - - # 
Separated for possible future changes - if field.name == "prompt" and field.value is None: - field.required = True - field.show = True - field.advanced = False - if field.name == "memory": - # field.required = False - field.show = True - field.advanced = False - if field.name == "verbose": - field.required = False - field.show = False - field.advanced = True - if field.name == "llm": - field.required = True - field.show = True - field.advanced = False - field.field_type = "BaseLanguageModel" - field.is_list = False - - if field.name == "return_source_documents": - field.required = False - field.show = True - field.advanced = True - field.value = True - - -class SeriesCharacterChainNode(FrontendNode): - output_type: str = "Chain" - name: str = "SeriesCharacterChain" - template: Template = Template( - type_name="SeriesCharacterChain", - fields=[ - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - advanced=False, - multiline=False, - name="character", - ), - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - advanced=False, - multiline=False, - name="series", - ), - TemplateField( - field_type="BaseLanguageModel", - required=True, - placeholder="", - is_list=False, - show=True, - advanced=False, - multiline=False, - name="llm", - display_name="LLM", - ), - ], - ) - description: str = ( - "SeriesCharacterChain is a chain you can use to have a conversation with a character from a series." 
# noqa - ) - base_classes: list[str] = [ - "LLMChain", - "BaseCustomChain", - "Chain", - "ConversationChain", - "SeriesCharacterChain", - "function", - ] - - -class TimeTravelGuideChainNode(FrontendNode): - output_type: str = "Chain" - name: str = "TimeTravelGuideChain" - template: Template = Template( - type_name="TimeTravelGuideChain", - fields=[ - TemplateField( - field_type="BaseLanguageModel", - required=True, - placeholder="", - is_list=False, - show=True, - advanced=False, - multiline=False, - name="llm", - display_name="LLM", - ), - TemplateField( - field_type="BaseChatMemory", - required=False, - show=True, - name="memory", - advanced=False, - ), - ], - ) - description: str = "Time travel guide chain." - base_classes: list[str] = [ - "LLMChain", - "BaseCustomChain", - "TimeTravelGuideChain", - "Chain", - "ConversationChain", - ] - - -class MidJourneyPromptChainNode(FrontendNode): - output_type: str = "Chain" - name: str = "MidJourneyPromptChain" - template: Template = Template( - type_name="MidJourneyPromptChain", - fields=[ - TemplateField( - field_type="BaseLanguageModel", - required=True, - placeholder="", - is_list=False, - show=True, - advanced=False, - multiline=False, - name="llm", - display_name="LLM", - ), - TemplateField( - field_type="BaseChatMemory", - required=False, - show=True, - name="memory", - advanced=False, - ), - ], - ) - description: str = "MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts." 
- base_classes: list[str] = [ - "LLMChain", - "BaseCustomChain", - "Chain", - "ConversationChain", - "MidJourneyPromptChain", - ] - - -class CombineDocsChainNode(FrontendNode): - output_type: str = "Chain" - name: str = "CombineDocsChain" - template: Template = Template( - type_name="load_qa_chain", - fields=[ - TemplateField( - field_type="str", - required=True, - is_list=True, - show=True, - multiline=False, - options=QA_CHAIN_TYPES, - value=QA_CHAIN_TYPES[0], - name="chain_type", - advanced=False, - ), - TemplateField( - field_type="BaseLanguageModel", - required=True, - show=True, - name="llm", - display_name="LLM", - advanced=False, - ), - ], - ) - description: str = """Load question answering chain.""" - base_classes: list[str] = ["BaseCombineDocumentsChain", "function"] - - def to_dict(self): - return super().to_dict() - - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - # do nothing and don't return anything - pass diff --git a/src/backend/base/langflow/template/frontend_node/custom_components.py b/src/backend/base/langflow/template/frontend_node/custom_components.py index 577c89684..932d30799 100644 --- a/src/backend/base/langflow/template/frontend_node/custom_components.py +++ b/src/backend/base/langflow/template/frontend_node/custom_components.py @@ -4,7 +4,7 @@ from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template -DEFAULT_CUSTOM_COMPONENT_CODE = """from langflow.interface.custom.custom_component import CustomComponent +DEFAULT_CUSTOM_COMPONENT_CODE = """from langflow.custom import CustomComponent from typing import Optional, List, Dict, Union from langflow.field_typing import ( diff --git a/src/backend/base/langflow/template/frontend_node/documentloaders.py b/src/backend/base/langflow/template/frontend_node/documentloaders.py deleted file mode 100644 index 31e13894a..000000000 --- 
a/src/backend/base/langflow/template/frontend_node/documentloaders.py +++ /dev/null @@ -1,301 +0,0 @@ -from typing import ClassVar, Dict, Optional - -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode - - -def build_file_field(fileTypes: list, name: str = "file_path") -> TemplateField: - """Build a template field for a document loader.""" - return TemplateField( - field_type="file", - required=True, - show=True, - name=name, - value="", - file_types=fileTypes, - ) - - -class DocumentLoaderFrontNode(FrontendNode): - def add_extra_base_classes(self) -> None: - self.base_classes = ["Document"] - self.output_types = ["Document"] - - file_path_templates: ClassVar[Dict] = { - "AirbyteJSONLoader": build_file_field( - fileTypes=[".json"], - ), - "CoNLLULoader": build_file_field( - fileTypes=[".csv"], - ), - "CSVLoader": build_file_field( - fileTypes=[".csv"], - ), - "UnstructuredEmailLoader": build_file_field( - fileTypes=[".eml"], - ), - "EverNoteLoader": build_file_field( - fileTypes=[".xml"], - ), - "FacebookChatLoader": build_file_field( - fileTypes=[".json"], - ), - "BSHTMLLoader": build_file_field( - fileTypes=[".html"], - ), - "UnstructuredHTMLLoader": build_file_field(fileTypes=[".html"]), - "UnstructuredImageLoader": build_file_field( - fileTypes=[".jpg", ".jpeg", ".png", ".gif", ".bmp"], - ), - "UnstructuredMarkdownLoader": build_file_field( - fileTypes=[".md"], - ), - "PyPDFLoader": build_file_field( - fileTypes=[".pdf"], - ), - "UnstructuredPowerPointLoader": build_file_field( - fileTypes=[".pptx", ".ppt"], - ), - "SRTLoader": build_file_field( - fileTypes=[".srt"], - ), - "TelegramChatLoader": build_file_field( - fileTypes=[".json"], - ), - "TextLoader": build_file_field( - fileTypes=[".txt"], - ), - "UnstructuredWordDocumentLoader": build_file_field( - fileTypes=[".docx", ".doc"], - ), - } - - def add_extra_fields(self) -> None: - name = None - display_name = "Web Page" - if 
self.template.type_name in {"GitLoader"}: - # Add fields repo_path, clone_url, branch and file_filter - self.template.add_field( - TemplateField( - field_type="str", - required=True, - show=True, - name="repo_path", - value="", - display_name="Path to repository", - advanced=False, - ) - ) - self.template.add_field( - TemplateField( - field_type="str", - required=False, - show=True, - name="clone_url", - value="", - display_name="Clone URL", - advanced=False, - ) - ) - self.template.add_field( - TemplateField( - field_type="str", - required=True, - show=True, - name="branch", - value="", - display_name="Branch", - advanced=False, - ) - ) - self.template.add_field( - TemplateField( - field_type="str", - required=False, - show=True, - name="file_filter", - value="", - display_name="File extensions (comma-separated)", - advanced=False, - ) - ) - elif self.template.type_name in {"SlackDirectoryLoader"}: - self.template.add_field( - TemplateField( - field_type="file", - required=True, - show=True, - name="zip_path", - value="", - display_name="Path to zip file", - file_types=[".zip"], - ) - ) - self.template.add_field( - TemplateField( - field_type="str", - required=False, - show=True, - name="workspace_url", - value="", - display_name="Workspace URL", - advanced=False, - ) - ) - elif self.template.type_name in self.file_path_templates: - self.template.add_field(self.file_path_templates[self.template.type_name]) - elif self.template.type_name in { - "WebBaseLoader", - "AZLyricsLoader", - "CollegeConfidentialLoader", - "HNLoader", - "IFixitLoader", - "IMSDbLoader", - "GutenbergLoader", - }: - name = "web_path" - elif self.template.type_name in {"GutenbergLoader"}: - name = "file_path" - elif self.template.type_name in {"GitbookLoader"}: - name = "web_page" - elif self.template.type_name in { - "DirectoryLoader", - "ReadTheDocsLoader", - "NotionDirectoryLoader", - "PyPDFDirectoryLoader", - }: - name = "path" - display_name = "Local directory" - if name: - if 
self.template.type_name in {"DirectoryLoader"}: - for field in build_directory_loader_fields(): - self.template.add_field(field) - else: - self.template.add_field( - TemplateField( - field_type="str", - required=True, - show=True, - name=name, - value="", - display_name=display_name, - ) - ) - # add a metadata field of type dict - self.template.add_field( - TemplateField( - field_type="dict", - required=False, - show=True, - name="metadata", - value={}, - display_name="Metadata", - multiline=False, - ) - ) - - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - if field.name == "metadata": - field.show = True - field.advanced = False - field.show = True - - -def build_directory_loader_fields(): - # if loader_kwargs is None: - # loader_kwargs = {} - # self.path = path - # self.glob = glob - # self.load_hidden = load_hidden - # self.loader_cls = loader_cls - # self.loader_kwargs = loader_kwargs - # self.silent_errors = silent_errors - # self.recursive = recursive - # self.show_progress = show_progress - # self.use_multithreading = use_multithreading - # self.max_concurrency = max_concurrency - # Based on the above fields, we can build the following fields: - # path, glob, load_hidden, silent_errors, recursive, show_progress, use_multithreading, max_concurrency - # path - path = TemplateField( - field_type="str", - required=True, - show=True, - name="path", - value="", - display_name="Local directory", - advanced=False, - ) - # glob - glob = TemplateField( - field_type="str", - required=True, - show=True, - name="glob", - value="**/*.txt", - display_name="glob", - advanced=False, - ) - # load_hidden - load_hidden = TemplateField( - field_type="bool", - required=False, - show=True, - name="load_hidden", - value="False", - display_name="Load hidden files", - advanced=True, - ) - # silent_errors - silent_errors = TemplateField( - field_type="bool", - required=False, - show=True, - 
name="silent_errors", - value="False", - display_name="Silent errors", - advanced=True, - ) - # recursive - recursive = TemplateField( - field_type="bool", - required=False, - show=True, - name="recursive", - value="True", - display_name="Recursive", - advanced=True, - ) - - # use_multithreading - use_multithreading = TemplateField( - field_type="bool", - required=False, - show=True, - name="use_multithreading", - value="True", - display_name="Use multithreading", - advanced=True, - ) - # max_concurrency - max_concurrency = TemplateField( - field_type="int", - required=False, - show=True, - name="max_concurrency", - value=10, - display_name="Max concurrency", - advanced=True, - ) - - return ( - path, - glob, - load_hidden, - silent_errors, - recursive, - use_multithreading, - max_concurrency, - ) diff --git a/src/backend/base/langflow/template/frontend_node/embeddings.py b/src/backend/base/langflow/template/frontend_node/embeddings.py deleted file mode 100644 index a2974487e..000000000 --- a/src/backend/base/langflow/template/frontend_node/embeddings.py +++ /dev/null @@ -1,119 +0,0 @@ -from typing import Optional - -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode - - -class EmbeddingFrontendNode(FrontendNode): - def add_extra_fields(self) -> None: - if "VertexAI" in self.template.type_name: - # Add credentials field which should of type file. 
- self.template.add_field( - TemplateField( - field_type="file", - required=False, - show=True, - name="credentials", - value="", - file_types=[".json"], - ) - ) - - @staticmethod - def format_vertex_field(field: TemplateField, name: str): - if "VertexAI" in name: - key = field.name or "" - advanced_fields = [ - "verbose", - "top_p", - "top_k", - "max_output_tokens", - ] - if key in advanced_fields: - field.advanced = True - show_fields = [ - "verbose", - "project", - "location", - "credentials", - "max_output_tokens", - "model_name", - "temperature", - "top_p", - "top_k", - ] - - if key in show_fields: - field.show = True - - @staticmethod - def format_jina_fields(field: TemplateField): - name = field.name or "" - if "jina" in name: - field.show = True - field.advanced = False - - if "auth" in name or "token" in name: - field.password = True - field.show = True - field.advanced = False - - if name == "jina_api_url": - field.show = True - field.advanced = True - field.display_name = "Jina API URL" - field.password = False - - @staticmethod - def format_openai_fields(field: TemplateField): - name = field.name or "" - if "openai" in name: - field.show = True - field.advanced = True - split_name = name.split("_") - title_name = " ".join([s.capitalize() for s in split_name]) - field.display_name = title_name.replace("Openai", "OpenAI").replace("Api", "API") - - if "api_key" in name: - field.password = True - field.show = True - field.advanced = False - - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - if name and "vertex" in name.lower(): - EmbeddingFrontendNode.format_vertex_field(field, name) - field.advanced = not field.required - field.show = True - key = field.name or "" - if key == "headers": - field.show = False - if key == "model_kwargs": - field.field_type = "dict" - field.advanced = True - field.show = True - elif key in [ - "model_name", - "temperature", - "model_file", - 
"model_type", - "deployment_name", - "credentials", - ]: - field.advanced = False - field.show = True - if key == "credentials": - field.field_type = "file" - if name == "VertexAI" and key not in [ - "callbacks", - "client", - "stop", - "tags", - "cache", - ]: - field.show = True - - # Format Jina fields - EmbeddingFrontendNode.format_jina_fields(field) - EmbeddingFrontendNode.format_openai_fields(field) diff --git a/src/backend/base/langflow/template/frontend_node/llms.py b/src/backend/base/langflow/template/frontend_node/llms.py deleted file mode 100644 index 7bf5a8cb6..000000000 --- a/src/backend/base/langflow/template/frontend_node/llms.py +++ /dev/null @@ -1,154 +0,0 @@ -from typing import Optional - -from langflow.services.database.models.base import orjson_dumps -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode -from langflow.template.frontend_node.constants import CTRANSFORMERS_DEFAULT_CONFIG, OPENAI_API_BASE_INFO - - -class LLMFrontendNode(FrontendNode): - def add_extra_fields(self) -> None: - if "VertexAI" in self.template.type_name: - # Add credentials field which should of type file. 
- self.template.add_field( - TemplateField( - field_type="file", - required=False, - show=True, - name="credentials", - value="", - file_types=[".json"], - ) - ) - - @staticmethod - def format_vertex_field(field: TemplateField, name: str): - key = field.name or "" - if "VertexAI" in name: - advanced_fields = [ - "tuned_model_name", - "verbose", - "top_p", - "top_k", - "max_output_tokens", - ] - if key in advanced_fields: - field.advanced = True - show_fields = [ - "tuned_model_name", - "verbose", - "project", - "location", - "credentials", - "max_output_tokens", - "model_name", - "temperature", - "top_p", - "top_k", - ] - - if key in show_fields: - field.show = True - - @staticmethod - def format_openai_field(field: TemplateField): - key = field.name or "" - if "openai" in key.lower(): - field.display_name = (key.title().replace("Openai", "OpenAI").replace("_", " ")).replace("Api", "API") - - if "key" not in key.lower() and "token" not in key.lower(): - field.password = False - - if key == "openai_api_base": - field.info = OPENAI_API_BASE_INFO - - def add_extra_base_classes(self) -> None: - if "BaseLanguageModel" not in self.base_classes: - self.base_classes.append("BaseLanguageModel") - - @staticmethod - def format_azure_field(field: TemplateField): - key = field.name or "" - if key == "model_name": - field.show = False # Azure uses deployment_name instead of model_name. 
- elif key == "openai_api_type": - field.show = False - field.password = False - field.value = "azure" - elif key == "openai_api_version": - field.password = False - - @staticmethod - def format_llama_field(field: TemplateField): - field.show = True - field.advanced = not field.required - - @staticmethod - def format_ctransformers_field(field: TemplateField): - key = field.name or "" - if key == "config": - field.show = True - field.advanced = True - field.value = orjson_dumps(CTRANSFORMERS_DEFAULT_CONFIG, indent_2=True) - - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - display_names_dict = { - "huggingfacehub_api_token": "HuggingFace Hub API Token", - } - FrontendNode.format_field(field, name) - LLMFrontendNode.format_openai_field(field) - LLMFrontendNode.format_ctransformers_field(field) - if name and "azure" in name.lower(): - LLMFrontendNode.format_azure_field(field) - if name and "llama" in name.lower(): - LLMFrontendNode.format_llama_field(field) - if name and "vertex" in name.lower(): - LLMFrontendNode.format_vertex_field(field, name) - SHOW_FIELDS = ["repo_id"] - key = field.name or "" - if key in SHOW_FIELDS: - field.show = True - - if "api" in key and ("key" in key or ("token" in key and "tokens" not in key)): - field.password = True - field.show = True - # Required should be False to support - # loading the API key from environment variables - field.required = False - field.advanced = False - - if key == "task": - field.required = True - field.show = True - field.is_list = True - field.options = ["text-generation", "text2text-generation", "summarization"] - field.value = field.options[0] - field.advanced = True - - if display_name := display_names_dict.get(key): - field.display_name = display_name - if key == "model_kwargs": - field.field_type = "dict" - field.advanced = True - field.show = True - elif key in [ - "model_name", - "temperature", - "model_file", - "model_type", - "deployment_name", - 
"credentials", - ]: - field.advanced = False - field.show = True - if key == "credentials": - field.field_type = "file" - if name == "VertexAI" and key not in [ - "callbacks", - "client", - "stop", - "tags", - "cache", - ]: - field.show = True diff --git a/src/backend/base/langflow/template/frontend_node/memories.py b/src/backend/base/langflow/template/frontend_node/memories.py deleted file mode 100644 index 1cdc8febb..000000000 --- a/src/backend/base/langflow/template/frontend_node/memories.py +++ /dev/null @@ -1,190 +0,0 @@ -from typing import Optional - -from langchain_community.chat_message_histories.mongodb import DEFAULT_COLLECTION_NAME, DEFAULT_DBNAME -from langchain_community.chat_message_histories.postgres import DEFAULT_CONNECTION_STRING - -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode -from langflow.template.frontend_node.constants import INPUT_KEY_INFO, OUTPUT_KEY_INFO -from langflow.template.template.base import Template - - -class MemoryFrontendNode(FrontendNode): - frozen: bool = True - - def add_extra_fields(self) -> None: - # chat history should have another way to add common field? 
- # prevent adding incorect field in ChatMessageHistory - base_message_classes = ["BaseEntityStore", "BaseChatMessageHistory"] - if any(base_class in self.base_classes for base_class in base_message_classes): - return - - # add return_messages field - self.template.add_field( - TemplateField( - field_type="bool", - required=False, - show=True, - name="return_messages", - advanced=False, - value=False, - ) - ) - # add input_key and output_key str fields - self.template.add_field( - TemplateField( - field_type="str", - required=False, - show=True, - name="input_key", - advanced=True, - value="", - ) - ) - if self.template.type_name not in {"VectorStoreRetrieverMemory"}: - self.template.add_field( - TemplateField( - field_type="str", - required=False, - show=True, - name="output_key", - advanced=True, - value="", - ) - ) - - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - - if not isinstance(field.value, str): - field.value = None - if field.name == "k": - field.required = True - field.show = True - field.field_type = "int" - field.value = 10 - field.display_name = "Memory Size" - field.password = False - if field.name == "return_messages": - field.required = False - field.show = True - field.advanced = False - if field.name in {"input_key", "output_key"}: - field.required = False - field.show = True - field.advanced = False - field.value = "" - field.info = INPUT_KEY_INFO if field.name == "input_key" else OUTPUT_KEY_INFO - - if field.name == "memory_key": - field.value = "chat_history" - if field.name == "chat_memory": - field.show = True - field.advanced = False - field.required = False - if field.name == "url": - field.show = True - if field.name == "entity_store": - field.show = False - if name == "ConversationEntityMemory" and field.name == "memory_key": - field.show = False - field.required = False - - if name == "MotorheadMemory": - if field.name == "chat_memory": - 
field.show = False - field.required = False - elif field.name == "client_id": - field.show = True - field.advanced = False - - -class PostgresChatMessageHistoryFrontendNode(MemoryFrontendNode): - name: str = "PostgresChatMessageHistory" - template: Template = Template( - type_name="PostgresChatMessageHistory", - fields=[ - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - name="session_id", - ), - TemplateField( - field_type="str", - required=True, - show=True, - name="connection_string", - value=DEFAULT_CONNECTION_STRING, - ), - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - value="message_store", - name="table_name", - ), - ], - ) - description: str = "Memory store with Postgres" - base_classes: list[str] = ["PostgresChatMessageHistory", "BaseChatMessageHistory"] - - -class MongoDBChatMessageHistoryFrontendNode(MemoryFrontendNode): - name: str = "MongoDBChatMessageHistory" - template: Template = Template( - # langchain/memory/chat_message_histories/mongodb.py - # connection_string: str, - # session_id: str, - # database_name: str = DEFAULT_DBNAME, - # collection_name: str = DEFAULT_COLLECTION_NAME, - type_name="MongoDBChatMessageHistory", - fields=[ - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - name="session_id", - ), - TemplateField( - field_type="str", - required=True, - show=True, - name="connection_string", - value="", - info="MongoDB connection string (e.g mongodb://mongo_user:password123@mongo:27017)", - ), - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - value=DEFAULT_DBNAME, - name="database_name", - ), - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - value=DEFAULT_COLLECTION_NAME, - 
name="collection_name", - ), - ], - ) - description: str = "Memory store with MongoDB" - base_classes: list[str] = ["MongoDBChatMessageHistory", "BaseChatMessageHistory"] diff --git a/src/backend/base/langflow/template/frontend_node/prompts.py b/src/backend/base/langflow/template/frontend_node/prompts.py deleted file mode 100644 index 03445f753..000000000 --- a/src/backend/base/langflow/template/frontend_node/prompts.py +++ /dev/null @@ -1,107 +0,0 @@ -from typing import Optional - -from langchain.agents.mrkl import prompt - -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode -from langflow.template.frontend_node.constants import DEFAULT_PROMPT, HUMAN_PROMPT, SYSTEM_PROMPT -from langflow.template.template.base import Template - - -class PromptFrontendNode(FrontendNode): - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - # if field.field_type == "StringPromptTemplate" - # change it to str - PROMPT_FIELDS = [ - "template", - "suffix", - "prefix", - "examples", - "format_instructions", - ] - key = field.name or "" - if field.field_type == "StringPromptTemplate" and "Message" in str(name): - field.field_type = "prompt" - field.multiline = True - field.value = HUMAN_PROMPT if "Human" in key else SYSTEM_PROMPT - if key == "template" and field.value == "": - field.value = DEFAULT_PROMPT - - if key and key in PROMPT_FIELDS: - field.field_type = "prompt" - field.advanced = False - - if "Union" in field.field_type and "BaseMessagePromptTemplate" in field.field_type: - field.field_type = "BaseMessagePromptTemplate" - - # All prompt fields should be password=False - field.password = False - field.dynamic = True - - -class PromptTemplateNode(FrontendNode): - name: str = "PromptTemplate" - template: Template - description: str - base_classes: list[str] = ["BasePromptTemplate"] - - @staticmethod - def format_field(field: 
TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - - if (field.name or "") == "examples": - field.advanced = False - - -class BasePromptFrontendNode(FrontendNode): - name: str - template: Template - description: str - base_classes: list[str] - - -class ZeroShotPromptNode(BasePromptFrontendNode): - name: str = "ZeroShotPrompt" - template: Template = Template( - type_name="ZeroShotPrompt", - fields=[ - TemplateField( - field_type="str", - required=False, - placeholder="", - is_list=False, - show=True, - multiline=True, - value=prompt.PREFIX, - name="prefix", - ), - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=True, - value=prompt.FORMAT_INSTRUCTIONS, - name="format_instructions", - ), - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=True, - value=prompt.SUFFIX, - name="suffix", - ), - ], - ) - description: str = "Prompt template for Zero Shot Agent." 
- base_classes: list[str] = ["BasePromptTemplate"] - - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - PromptFrontendNode.format_field(field, name) diff --git a/src/backend/base/langflow/template/frontend_node/retrievers.py b/src/backend/base/langflow/template/frontend_node/retrievers.py deleted file mode 100644 index b482c8b84..000000000 --- a/src/backend/base/langflow/template/frontend_node/retrievers.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Optional - -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode - - -class RetrieverFrontendNode(FrontendNode): - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - # Define common field attributes - field.show = True - if field.name == "parser_key": - field.display_name = "Parser Key" - field.password = False diff --git a/src/backend/base/langflow/template/frontend_node/textsplitters.py b/src/backend/base/langflow/template/frontend_node/textsplitters.py deleted file mode 100644 index 8fc5620d2..000000000 --- a/src/backend/base/langflow/template/frontend_node/textsplitters.py +++ /dev/null @@ -1,73 +0,0 @@ -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode -from langchain_text_splitters import Language - - -class TextSplittersFrontendNode(FrontendNode): - def add_extra_base_classes(self) -> None: - self.base_classes = ["Document"] - self.output_types = ["Document"] - - def add_extra_fields(self) -> None: - self.template.add_field( - TemplateField( - field_type="Document", - required=True, - show=True, - name="documents", - is_list=True, - ) - ) - name = "separator" - if self.template.type_name == "CharacterTextSplitter": - name = "separator" - elif self.template.type_name == "RecursiveCharacterTextSplitter": - name = "separators" - # Add a field for type 
of separator - # which will have Text or any value from the - # Language enum - options = [x.value for x in Language] + ["Text"] - options.sort() - self.template.add_field( - TemplateField( - field_type="str", - required=True, - show=True, - name="separator_type", - advanced=False, - is_list=True, - options=options, - value="Text", - display_name="Separator Type", - ) - ) - self.template.add_field( - TemplateField( - field_type="str", - required=True, - show=True, - value="\\n", - name=name, - display_name="Separator", - ) - ) - self.template.add_field( - TemplateField( - field_type="int", - required=True, - show=True, - value=1000, - name="chunk_size", - display_name="Chunk Size", - ) - ) - self.template.add_field( - TemplateField( - field_type="int", - required=True, - show=True, - value=200, - name="chunk_overlap", - display_name="Chunk Overlap", - ) - ) diff --git a/src/backend/base/langflow/template/frontend_node/tools.py b/src/backend/base/langflow/template/frontend_node/tools.py deleted file mode 100644 index 5bed90c05..000000000 --- a/src/backend/base/langflow/template/frontend_node/tools.py +++ /dev/null @@ -1,130 +0,0 @@ -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode -from langflow.template.template.base import Template -from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION - - -class ToolNode(FrontendNode): - name: str = "Tool" - template: Template = Template( - type_name="Tool", - fields=[ - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=True, - value="", - name="name", - advanced=False, - ), - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=True, - value="", - name="description", - advanced=False, - ), - TemplateField( - name="func", - field_type="Callable", - required=True, - is_list=False, - show=True, - multiline=True, - advanced=False, - ), - 
TemplateField( - field_type="bool", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - value=False, - name="return_direct", - ), - ], - ) - description: str = "Converts a chain, agent or function into a tool." - base_classes: list[str] = ["Tool", "BaseTool"] - - -class PythonFunctionToolNode(FrontendNode): - name: str = "PythonFunctionTool" - template: Template = Template( - type_name="PythonFunctionTool", - fields=[ - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - value="", - name="name", - advanced=False, - ), - TemplateField( - field_type="str", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - value="", - name="description", - advanced=False, - ), - TemplateField( - field_type="code", - required=True, - placeholder="", - is_list=False, - show=True, - value=DEFAULT_PYTHON_FUNCTION, - name="code", - advanced=False, - ), - TemplateField( - field_type="bool", - required=True, - placeholder="", - is_list=False, - show=True, - multiline=False, - value=False, - name="return_direct", - ), - ], - ) - description: str = "Python function to be executed." - base_classes: list[str] = ["BaseTool", "Tool"] - - -class PythonFunctionNode(FrontendNode): - name: str = "PythonFunction" - template: Template = Template( - type_name="PythonFunction", - fields=[ - TemplateField( - field_type="code", - required=True, - placeholder="", - is_list=False, - show=True, - value=DEFAULT_PYTHON_FUNCTION, - name="code", - advanced=False, - ) - ], - ) - description: str = "Python function to be executed." 
- base_classes: list[str] = ["Callable"] diff --git a/src/backend/base/langflow/template/frontend_node/utilities.py b/src/backend/base/langflow/template/frontend_node/utilities.py deleted file mode 100644 index 51849189c..000000000 --- a/src/backend/base/langflow/template/frontend_node/utilities.py +++ /dev/null @@ -1,24 +0,0 @@ -import ast -from typing import Optional - -from langflow.services.database.models.base import orjson_dumps -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode - - -class UtilitiesFrontendNode(FrontendNode): - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - # field.field_type could be "Literal['news', 'search', 'places', 'images'] - # we need to convert it to a list - # It seems it could also be like "typing_extensions.['news', 'search', 'places', 'images']" - if "Literal" in field.field_type: - field_type = field.field_type.replace("typing_extensions.", "") - field_type = field_type.replace("Literal", "") - field.options = ast.literal_eval(field_type) - field.is_list = True - field.field_type = "str" - - if isinstance(field.value, dict): - field.value = orjson_dumps(field.value) diff --git a/src/backend/base/langflow/template/frontend_node/vectorstores.py b/src/backend/base/langflow/template/frontend_node/vectorstores.py deleted file mode 100644 index 1f49f76a9..000000000 --- a/src/backend/base/langflow/template/frontend_node/vectorstores.py +++ /dev/null @@ -1,369 +0,0 @@ -from typing import List, Optional - -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.base import FrontendNode - -BASIC_FIELDS = [ - "work_dir", - "collection_name", - "api_key", - "location", - "persist_directory", - "persist", - "weaviate_url", - "es_url", - "index_name", - "namespace", - "folder_path", - "table_name", - "query_name", - "supabase_url", - 
"supabase_service_key", - "mongodb_atlas_cluster_uri", - "collection_name", - "db_name", -] -ADVANCED_FIELDS = [ - "n_dim", - "key", - "prefix", - "distance_func", - "content_payload_key", - "metadata_payload_key", - "timeout", - "host", - "path", - "url", - "port", - "https", - "prefer_grpc", - "grpc_port", - "pinecone_api_key", - "pinecone_env", - "client_kwargs", - "search_kwargs", - "chroma_server_host", - "chroma_server_http_port", - "chroma_server_ssl_enabled", - "chroma_server_grpc_port", - "chroma_server_cors_allow_origins", -] - - -class VectorStoreFrontendNode(FrontendNode): - def add_extra_fields(self) -> None: - extra_fields: List[TemplateField] = [] - # Add search_kwargs field - extra_field = TemplateField( - name="search_kwargs", - field_type="NestedDict", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - value="{}", - ) - extra_fields.append(extra_field) - if self.template.type_name == "Weaviate": - extra_field = TemplateField( - name="weaviate_url", - field_type="str", - required=True, - placeholder="http://localhost:8080", - show=True, - advanced=False, - multiline=False, - value="http://localhost:8080", - ) - # Add client_kwargs field - extra_field2 = TemplateField( - name="client_kwargs", - field_type="code", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - value="{}", - ) - extra_fields.extend((extra_field, extra_field2)) - - elif self.template.type_name == "Chroma": - # New bool field for persist parameter - chroma_fields = [ - TemplateField( - name="persist", - field_type="bool", - required=False, - show=True, - advanced=False, - value=False, - display_name="Persist", - ), - # chroma_server_grpc_port: str | None = None, - TemplateField( - name="chroma_server_host", - field_type="str", - required=False, - show=True, - advanced=True, - display_name="Chroma Server Host", - ), - TemplateField( - name="chroma_server_http_port", - field_type="str", - required=False, - 
show=True, - advanced=True, - display_name="Chroma Server HTTP Port", - ), - TemplateField( - name="chroma_server_ssl_enabled", - field_type="bool", - required=False, - show=True, - advanced=True, - value=False, - display_name="Chroma Server SSL Enabled", - ), - TemplateField( - name="chroma_server_grpc_port", - field_type="str", - required=False, - show=True, - advanced=True, - display_name="Chroma Server GRPC Port", - ), - TemplateField( - name="chroma_server_cors_allow_origins", - field_type="str", - required=False, - is_list=True, - show=True, - advanced=True, - display_name="Chroma Server CORS Allow Origins", - ), - ] - - extra_fields.extend(chroma_fields) - elif self.template.type_name == "Pinecone": - # add pinecone_api_key and pinecone_env - extra_field = TemplateField( - name="pinecone_api_key", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - password=True, - value="", - ) - extra_field2 = TemplateField( - name="pinecone_env", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - value="", - ) - extra_fields.extend((extra_field, extra_field2)) - - elif self.template.type_name == "ElasticsearchStore": - # add elastic and elastic credentials - extra_field = TemplateField( - name="es_url", - field_type="str", - required=True, - placeholder="http://localhost:9200", - show=True, - advanced=False, - multiline=False, - value="http://localhost:9200", - display_name="Elasticsearch URL", - ) - extra_field2 = TemplateField( - name="index_name", - field_type="str", - required=True, - placeholder="test-index", - show=True, - advanced=False, - multiline=False, - value="test-index", - display_name="Index Name", - ) - extra_fields.extend((extra_field, extra_field2)) - - elif self.template.type_name == "FAISS": - extra_field = TemplateField( - name="folder_path", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - 
multiline=False, - display_name="Local Path", - value="", - ) - extra_field2 = TemplateField( - name="index_name", - field_type="str", - required=False, - show=True, - advanced=False, - value="", - display_name="Index Name", - ) - extra_fields.extend((extra_field, extra_field2)) - elif self.template.type_name == "SupabaseVectorStore": - self.display_name = "Supabase" - # Add table_name and query_name - extra_field = TemplateField( - name="table_name", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - value="", - ) - extra_field2 = TemplateField( - name="query_name", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - value="", - ) - # Add supabase_url and supabase_service_key - extra_field3 = TemplateField( - name="supabase_url", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - value="", - ) - extra_field4 = TemplateField( - name="supabase_service_key", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - password=True, - value="", - ) - extra_fields.extend((extra_field, extra_field2, extra_field3, extra_field4)) - - elif self.template.type_name == "MongoDBAtlasVectorSearch": - self.display_name = "MongoDB Atlas" - - extra_field = TemplateField( - name="mongodb_atlas_cluster_uri", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - display_name="MongoDB Atlas Cluster URI", - value="", - ) - extra_field2 = TemplateField( - name="collection_name", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - display_name="Collection Name", - value="", - ) - extra_field3 = TemplateField( - name="db_name", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - display_name="Database Name", - 
value="", - ) - extra_field4 = TemplateField( - name="index_name", - field_type="str", - required=False, - placeholder="", - show=True, - advanced=True, - multiline=False, - display_name="Index Name", - value="", - ) - extra_fields.extend((extra_field, extra_field2, extra_field3, extra_field4)) - - if extra_fields: - for field in extra_fields: - self.template.add_field(field) - - def add_extra_base_classes(self) -> None: - self.base_classes.extend(("BaseRetriever", "VectorStoreRetriever")) - - @staticmethod - def format_field(field: TemplateField, name: Optional[str] = None) -> None: - FrontendNode.format_field(field, name) - # Define common field attributes - - # Check and set field attributes - if field.name == "texts": - # if field.name is "texts" it has to be replaced - # when instantiating the vectorstores - field.name = "documents" - - field.field_type = "Document" - field.display_name = "Documents" - field.required = False - field.show = True - field.advanced = False - field.is_list = True - elif field.name and "embedding" in field.name: - # for backwards compatibility - field.name = "embedding" - field.required = True - field.show = True - field.advanced = False - field.display_name = "Embedding" - field.field_type = "Embeddings" - - elif field.name in BASIC_FIELDS: - field.show = True - field.advanced = False - if field.name == "api_key": - field.display_name = "API Key" - field.password = True - elif field.name == "location": - field.value = ":memory:" - field.placeholder = ":memory:" - - elif field.name in ADVANCED_FIELDS: - field.show = True - field.advanced = True - if "key" in field.name: - field.password = False - - elif field.name == "text_key": - field.show = False diff --git a/src/backend/base/langflow/utils/migration.py b/src/backend/base/langflow/utils/migration.py new file mode 100644 index 000000000..b85522c5b --- /dev/null +++ b/src/backend/base/langflow/utils/migration.py @@ -0,0 +1,65 @@ +from sqlalchemy.engine.reflection import Inspector + 
+ +def table_exists(name, conn): + """ + Check if a table exists. + + Parameters: + name (str): The name of the table to check. + conn (sqlalchemy.engine.Engine or sqlalchemy.engine.Connection): The SQLAlchemy engine or connection to use. + + Returns: + bool: True if the table exists, False otherwise. + """ + inspector = Inspector.from_engine(conn) + return name in inspector.get_table_names() + + +def column_exists(table_name, column_name, conn): + """ + Check if a column exists in a table. + + Parameters: + table_name (str): The name of the table to check. + column_name (str): The name of the column to check. + conn (sqlalchemy.engine.Engine or sqlalchemy.engine.Connection): The SQLAlchemy engine or connection to use. + + Returns: + bool: True if the column exists, False otherwise. + """ + inspector = Inspector.from_engine(conn) + return column_name in [column["name"] for column in inspector.get_columns(table_name)] + + +def foreign_key_exists(table_name, fk_name, conn): + """ + Check if a foreign key exists in a table. + + Parameters: + table_name (str): The name of the table to check. + fk_name (str): The name of the foreign key to check. + conn (sqlalchemy.engine.Engine or sqlalchemy.engine.Connection): The SQLAlchemy engine or connection to use. + + Returns: + bool: True if the foreign key exists, False otherwise. + """ + inspector = Inspector.from_engine(conn) + return fk_name in [fk["name"] for fk in inspector.get_foreign_keys(table_name)] + + +def constraint_exists(table_name, constraint_name, conn): + """ + Check if a constraint exists in a table. + + Parameters: + table_name (str): The name of the table to check. + constraint_name (str): The name of the constraint to check. + conn (sqlalchemy.engine.Engine or sqlalchemy.engine.Connection): The SQLAlchemy engine or connection to use. + + Returns: + bool: True if the constraint exists, False otherwise. 
+ """ + inspector = Inspector.from_engine(conn) + constraints = inspector.get_unique_constraints(table_name) + return constraint_name in [constraint["name"] for constraint in constraints] diff --git a/src/backend/base/langflow/utils/util.py b/src/backend/base/langflow/utils/util.py index a4cce8ea3..bc7efc161 100644 --- a/src/backend/base/langflow/utils/util.py +++ b/src/backend/base/langflow/utils/util.py @@ -6,6 +6,8 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Union from docstring_parser import parse + + from langflow.schema.schema import Record from langflow.services.deps import get_settings_service from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS @@ -438,13 +440,13 @@ def update_settings( settings_service.settings.update_from_yaml(config, dev=dev) if remove_api_keys: logger.debug(f"Setting remove_api_keys to {remove_api_keys}") - settings_service.settings.update_settings(REMOVE_API_KEYS=remove_api_keys) + settings_service.settings.update_settings(remove_api_keys=remove_api_keys) if cache: logger.debug(f"Setting cache to {cache}") - settings_service.settings.update_settings(CACHE=cache) + settings_service.settings.update_settings(cache=cache) if components_path: logger.debug(f"Adding component path {components_path}") - settings_service.settings.update_settings(COMPONENTS_PATH=components_path) + settings_service.settings.update_settings(components_path=components_path) if not store: logger.debug("Setting store to False") - settings_service.settings.update_settings(STORE=False) + settings_service.settings.update_settings(store=False) diff --git a/src/backend/base/poetry.lock b/src/backend/base/poetry.lock index b1607cf7e..21906d71b 100644 --- a/src/backend/base/poetry.lock +++ b/src/backend/base/poetry.lock @@ -142,13 +142,13 @@ files = [ [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = 
false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -1224,18 +1224,18 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15. [[package]] name = "langchain-core" -version = "0.2.1" +version = "0.2.3" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_core-0.2.1-py3-none-any.whl", hash = "sha256:3521e1e573988c47399fca9739270c5d34f8ecec147253ad829eb9ff288f76d5"}, - {file = "langchain_core-0.2.1.tar.gz", hash = "sha256:49383126168d934559a543ce812c485048d9e6ac9b6798fbf3d4a72b6bba5b0c"}, + {file = "langchain_core-0.2.3-py3-none-any.whl", hash = "sha256:22189b5a3a30bfd65eb995f95e627f7c2c3acb322feb89f5f5f2fb7df21833a7"}, + {file = "langchain_core-0.2.3.tar.gz", hash = "sha256:fbc75a64b9c0b7655d96ca57a707df1e6c09efc1539c36adbd73260612549810"}, ] [package.dependencies] jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.0,<0.2.0" +langsmith = ">=0.1.65,<0.2.0" packaging = ">=23.2,<24.0" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -1281,13 +1281,13 @@ extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"] [[package]] name = "langchainhub" -version = "0.1.15" +version = "0.1.17" description = "The LangChain Hub API client" optional = false -python-versions = ">=3.8.1,<4.0" +python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchainhub-0.1.15-py3-none-any.whl", hash = "sha256:89a0951abd1db255e91c6d545d092a598fc255aa865d1ffc3ce8f93bbeae60e7"}, - 
{file = "langchainhub-0.1.15.tar.gz", hash = "sha256:fa3ff81a31946860f84c119f1e2f6b7c7707e2bd7ed2394a7313b286d59f3bda"}, + {file = "langchainhub-0.1.17-py3-none-any.whl", hash = "sha256:4c609b3948252c71670f0d98f73413b515cfd2f6701a7b40ce959203e6133e04"}, + {file = "langchainhub-0.1.17.tar.gz", hash = "sha256:af7df0cb1cebc7a6e0864e8632ae48ecad39ed96568f699c78657b9d04e50b46"}, ] [package.dependencies] @@ -1296,13 +1296,13 @@ types-requests = ">=2.31.0.2,<3.0.0.0" [[package]] name = "langsmith" -version = "0.1.63" +version = "0.1.67" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.63-py3-none-any.whl", hash = "sha256:7810afdf5e3f3b472fc581a29371fb96cd843dde2149e048d1b9610325159d1e"}, - {file = "langsmith-0.1.63.tar.gz", hash = "sha256:a609405b52f6f54df442a142cbf19ab38662d54e532f96028b4c546434d4afdf"}, + {file = "langsmith-0.1.67-py3-none-any.whl", hash = "sha256:7eb2e1c1b375925ff47700ed8071e10c15e942e9d1d634b4a449a9060364071a"}, + {file = "langsmith-0.1.67.tar.gz", hash = "sha256:149558669a2ac4f21471cd964e61072687bba23b7c1ccb51f190a8f59b595b39"}, ] [package.dependencies] @@ -2104,18 +2104,18 @@ files = [ [[package]] name = "pydantic" -version = "2.7.1" +version = "2.7.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, - {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, + {file = "pydantic-2.7.2-py3-none-any.whl", hash = "sha256:834ab954175f94e6e68258537dc49402c4a5e9d0409b9f1b86b7e934a8372de7"}, + {file = "pydantic-2.7.2.tar.gz", hash = "sha256:71b2945998f9c9b7919a45bde9a50397b289937d215ae141c1d0903ba7149fd7"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.2" 
+pydantic-core = "2.18.3" typing-extensions = ">=4.6.1" [package.extras] @@ -2123,90 +2123,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.2" +version = "2.18.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, - {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, - {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, - {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, - {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, - {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, - {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, - {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, - {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, - {file = 
"pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, - {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, - {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, - {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, - {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, - 
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, - {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:744697428fcdec6be5670460b578161d1ffe34743a5c15656be7ea82b008197c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37b40c05ced1ba4218b14986fe6f283d22e1ae2ff4c8e28881a70fb81fbfcda7"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a9a75622357076efb6b311983ff190fbfb3c12fc3a853122b34d3d358126c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2e253af04ceaebde8eb201eb3f3e3e7e390f2d275a88300d6a1959d710539e2"}, + {file = 
"pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:855ec66589c68aa367d989da5c4755bb74ee92ccad4fdb6af942c3612c067e34"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d3e42bb54e7e9d72c13ce112e02eb1b3b55681ee948d748842171201a03a98a"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6ac9ffccc9d2e69d9fba841441d4259cb668ac180e51b30d3632cd7abca2b9b"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c56eca1686539fa0c9bda992e7bd6a37583f20083c37590413381acfc5f192d6"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17954d784bf8abfc0ec2a633108207ebc4fa2df1a0e4c0c3ccbaa9bb01d2c426"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:98ed737567d8f2ecd54f7c8d4f8572ca7c7921ede93a2e52939416170d357812"}, + {file = "pydantic_core-2.18.3-cp310-none-win32.whl", hash = "sha256:9f9e04afebd3ed8c15d67a564ed0a34b54e52136c6d40d14c5547b238390e779"}, + {file = "pydantic_core-2.18.3-cp310-none-win_amd64.whl", hash = "sha256:45e4ffbae34f7ae30d0047697e724e534a7ec0a82ef9994b7913a412c21462a0"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b9ebe8231726c49518b16b237b9fe0d7d361dd221302af511a83d4ada01183ab"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b8e20e15d18bf7dbb453be78a2d858f946f5cdf06c5072453dace00ab652e2b2"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d9ff283cd3459fa0bf9b0256a2b6f01ac1ff9ffb034e24457b9035f75587cb"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f7ef5f0ebb77ba24c9970da18b771711edc5feaf00c10b18461e0f5f5949231"}, + {file = 
"pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73038d66614d2e5cde30435b5afdced2b473b4c77d4ca3a8624dd3e41a9c19be"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6afd5c867a74c4d314c557b5ea9520183fadfbd1df4c2d6e09fd0d990ce412cd"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd7df92f28d351bb9f12470f4c533cf03d1b52ec5a6e5c58c65b183055a60106"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80aea0ffeb1049336043d07799eace1c9602519fb3192916ff525b0287b2b1e4"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aaee40f25bba38132e655ffa3d1998a6d576ba7cf81deff8bfa189fb43fd2bbe"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9128089da8f4fe73f7a91973895ebf2502539d627891a14034e45fb9e707e26d"}, + {file = "pydantic_core-2.18.3-cp311-none-win32.whl", hash = "sha256:fec02527e1e03257aa25b1a4dcbe697b40a22f1229f5d026503e8b7ff6d2eda7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_amd64.whl", hash = "sha256:58ff8631dbab6c7c982e6425da8347108449321f61fe427c52ddfadd66642af7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_arm64.whl", hash = "sha256:3fc1c7f67f34c6c2ef9c213e0f2a351797cda98249d9ca56a70ce4ebcaba45f4"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f0928cde2ae416a2d1ebe6dee324709c6f73e93494d8c7aea92df99aab1fc40f"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bee9bb305a562f8b9271855afb6ce00223f545de3d68560b3c1649c7c5295e9"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e862823be114387257dacbfa7d78547165a85d7add33b446ca4f4fae92c7ff5c"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:6a36f78674cbddc165abab0df961b5f96b14461d05feec5e1f78da58808b97e7"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba905d184f62e7ddbb7a5a751d8a5c805463511c7b08d1aca4a3e8c11f2e5048"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fdd362f6a586e681ff86550b2379e532fee63c52def1c666887956748eaa326"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b214b7ee3bd3b865e963dbed0f8bc5375f49449d70e8d407b567af3222aae4"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:691018785779766127f531674fa82bb368df5b36b461622b12e176c18e119022"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:60e4c625e6f7155d7d0dcac151edf5858102bc61bf959d04469ca6ee4e8381bd"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4e651e47d981c1b701dcc74ab8fec5a60a5b004650416b4abbef13db23bc7be"}, + {file = "pydantic_core-2.18.3-cp312-none-win32.whl", hash = "sha256:ffecbb5edb7f5ffae13599aec33b735e9e4c7676ca1633c60f2c606beb17efc5"}, + {file = "pydantic_core-2.18.3-cp312-none-win_amd64.whl", hash = "sha256:2c8333f6e934733483c7eddffdb094c143b9463d2af7e6bd85ebcb2d4a1b82c6"}, + {file = "pydantic_core-2.18.3-cp312-none-win_arm64.whl", hash = "sha256:7a20dded653e516a4655f4c98e97ccafb13753987434fe7cf044aa25f5b7d417"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:eecf63195be644b0396f972c82598cd15693550f0ff236dcf7ab92e2eb6d3522"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c44efdd3b6125419c28821590d7ec891c9cb0dff33a7a78d9d5c8b6f66b9702"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e59fca51ffbdd1638b3856779342ed69bcecb8484c1d4b8bdb237d0eb5a45e2"}, + {file = 
"pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70cf099197d6b98953468461d753563b28e73cf1eade2ffe069675d2657ed1d5"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63081a49dddc6124754b32a3774331467bfc3d2bd5ff8f10df36a95602560361"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:370059b7883485c9edb9655355ff46d912f4b03b009d929220d9294c7fd9fd60"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a64faeedfd8254f05f5cf6fc755023a7e1606af3959cfc1a9285744cc711044"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19d2e725de0f90d8671f89e420d36c3dd97639b98145e42fcc0e1f6d492a46dc"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:67bc078025d70ec5aefe6200ef094576c9d86bd36982df1301c758a9fff7d7f4"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf952c3f4100e203cbaf8e0c907c835d3e28f9041474e52b651761dc248a3c0"}, + {file = "pydantic_core-2.18.3-cp38-none-win32.whl", hash = "sha256:9a46795b1f3beb167eaee91736d5d17ac3a994bf2215a996aed825a45f897558"}, + {file = "pydantic_core-2.18.3-cp38-none-win_amd64.whl", hash = "sha256:200ad4e3133cb99ed82342a101a5abf3d924722e71cd581cc113fe828f727fbc"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:304378b7bf92206036c8ddd83a2ba7b7d1a5b425acafff637172a3aa72ad7083"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c826870b277143e701c9ccf34ebc33ddb4d072612683a044e7cce2d52f6c3fef"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e201935d282707394f3668380e41ccf25b5794d1b131cdd96b07f615a33ca4b1"}, + {file = 
"pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5560dda746c44b48bf82b3d191d74fe8efc5686a9ef18e69bdabccbbb9ad9442"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b32c2a1f8032570842257e4c19288eba9a2bba4712af542327de9a1204faff8"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:929c24e9dea3990bc8bcd27c5f2d3916c0c86f5511d2caa69e0d5290115344a9"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a8376fef60790152564b0eab376b3e23dd6e54f29d84aad46f7b264ecca943"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dccf3ef1400390ddd1fb55bf0632209d39140552d068ee5ac45553b556780e06"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41dbdcb0c7252b58fa931fec47937edb422c9cb22528f41cb8963665c372caf6"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:666e45cf071669fde468886654742fa10b0e74cd0fa0430a46ba6056b24fb0af"}, + {file = "pydantic_core-2.18.3-cp39-none-win32.whl", hash = "sha256:f9c08cabff68704a1b4667d33f534d544b8a07b8e5d039c37067fceb18789e78"}, + {file = "pydantic_core-2.18.3-cp39-none-win_amd64.whl", hash = "sha256:4afa5f5973e8572b5c0dcb4e2d4fda7890e7cd63329bd5cc3263a25c92ef0026"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:77319771a026f7c7d29c6ebc623de889e9563b7087911b46fd06c044a12aa5e9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:df11fa992e9f576473038510d66dd305bcd51d7dd508c163a8c8fe148454e059"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d531076bdfb65af593326ffd567e6ab3da145020dafb9187a1d131064a55f97c"}, + {file = 
"pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d33ce258e4e6e6038f2b9e8b8a631d17d017567db43483314993b3ca345dcbbb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f9cd7f5635b719939019be9bda47ecb56e165e51dd26c9a217a433e3d0d59a9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cd4a032bb65cc132cae1fe3e52877daecc2097965cd3914e44fbd12b00dae7c5"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f2718430098bcdf60402136c845e4126a189959d103900ebabb6774a5d9fdb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0037a92cf0c580ed14e10953cdd26528e8796307bb8bb312dc65f71547df04d"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b95a0972fac2b1ff3c94629fc9081b16371dad870959f1408cc33b2f78ad347a"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a62e437d687cc148381bdd5f51e3e81f5b20a735c55f690c5be94e05da2b0d5c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b367a73a414bbb08507da102dc2cde0fa7afe57d09b3240ce82a16d608a7679c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ecce4b2360aa3f008da3327d652e74a0e743908eac306198b47e1c58b03dd2b"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4435b8d83f0c9561a2a9585b1de78f1abb17cb0cef5f39bf6a4b47d19bafe3"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:616221a6d473c5b9aa83fa8982745441f6a4a62a66436be9445c65f241b86c94"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7e6382ce89a92bc1d0c0c5edd51e931432202b9080dc921d8d003e616402efd1"}, + {file 
= "pydantic_core-2.18.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff58f379345603d940e461eae474b6bbb6dab66ed9a851ecd3cb3709bf4dcf6a"}, + {file = "pydantic_core-2.18.3.tar.gz", hash = "sha256:432e999088d85c8f36b9a3f769a8e2b57aabd817bbb729a90d1fe7f18f6f1f39"}, ] [package.dependencies] @@ -2266,6 +2266,16 @@ docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] image = ["Pillow (>=8.0.0)"] +[[package]] +name = "pyperclip" +version = "1.8.2" +description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)" +optional = false +python-versions = "*" +files = [ + {file = "pyperclip-1.8.2.tar.gz", hash = "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57"}, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -2456,13 +2466,13 @@ files = [ [[package]] name = "requests" -version = "2.32.2" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -2846,6 +2856,21 @@ files = [ {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, ] +[[package]] +name = "uncurl" +version = "0.0.11" +description = "A library to convert curl requests to python-requests." 
+optional = false +python-versions = "*" +files = [ + {file = "uncurl-0.0.11-py3-none-any.whl", hash = "sha256:5961e93f07a5c9f2ef8ae4245bd92b0a6ce503c851de980f5b70080ae74cdc59"}, + {file = "uncurl-0.0.11.tar.gz", hash = "sha256:530c9bbd4d118f4cde6194165ff484cc25b0661cd256f19e9d5fcb53fc077790"}, +] + +[package.dependencies] +pyperclip = "*" +six = "*" + [[package]] name = "urllib3" version = "2.2.1" @@ -2935,86 +2960,86 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "watchfiles" -version = "0.21.0" +version = "0.22.0" description = "Simple, modern and high performance file watching and code reload in python." optional = false python-versions = ">=3.8" files = [ - {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"}, - {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"}, - {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"}, - {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"}, - {file = 
"watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"}, - {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"}, - {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"}, - {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = "sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"}, - {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"}, - {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"}, - {file = "watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"}, - {file = 
"watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"}, - {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"}, - {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = "sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"}, - {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"}, - {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"}, - {file = 
"watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"}, - {file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"}, + {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"}, + {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"}, + {file = 
"watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"}, + {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"}, + {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"}, + {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"}, + {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"}, + {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = "sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"}, + {file = 
"watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"}, + {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"}, + {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"}, + {file = "watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d47e9ef1a94cc7a536039e46738e17cce058ac1593b2eccdede8bf72e45f372a"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28f393c1194b6eaadcdd8f941307fc9bbd7eb567995232c830f6aef38e8a6e88"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd64f3a4db121bc161644c9e10a9acdb836853155a108c2446db2f5ae1778c3d"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2abeb79209630da981f8ebca30a2c84b4c3516a214451bfc5f106723c5f45843"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc382083afba7918e32d5ef12321421ef43d685b9a67cc452a6e6e18920890e"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d048ad5d25b363ba1d19f92dcf29023988524bee6f9d952130b316c5802069cb"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:103622865599f8082f03af4214eaff90e2426edff5e8522c8f9e93dc17caee13"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e1f3cf81f1f823e7874ae563457828e940d75573c8fbf0ee66818c8b6a9099"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8597b6f9dc410bdafc8bb362dac1cbc9b4684a8310e16b1ff5eee8725d13dcd6"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b04a2cbc30e110303baa6d3ddce8ca3664bc3403be0f0ad513d1843a41c97d1"}, + {file = "watchfiles-0.22.0-cp38-none-win32.whl", hash = "sha256:b610fb5e27825b570554d01cec427b6620ce9bd21ff8ab775fc3a32f28bba63e"}, + {file = "watchfiles-0.22.0-cp38-none-win_amd64.whl", hash = "sha256:fe82d13461418ca5e5a808a9e40f79c1879351fcaeddbede094028e74d836e86"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3973145235a38f73c61474d56ad6199124e7488822f3a4fc97c72009751ae3b0"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:280a4afbc607cdfc9571b9904b03a478fc9f08bbeec382d648181c695648202f"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0d883351a34c01bd53cfa75cd0292e3f7e268bacf2f9e33af4ecede7e21d1d"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9165bcab15f2b6d90eedc5c20a7f8a03156b3773e5fb06a790b54ccecdb73385"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc1b9b56f051209be458b87edb6856a449ad3f803315d87b2da4c93b43a6fe72"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc1fc25a1dedf2dd952909c8e5cb210791e5f2d9bc5e0e8ebc28dd42fed7562"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc92d2d2706d2b862ce0568b24987eba51e17e14b79a1abcd2edc39e48e743c8"}, + {file = 
"watchfiles-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97b94e14b88409c58cdf4a8eaf0e67dfd3ece7e9ce7140ea6ff48b0407a593ec"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96eec15e5ea7c0b6eb5bfffe990fc7c6bd833acf7e26704eb18387fb2f5fd087"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:28324d6b28bcb8d7c1041648d7b63be07a16db5510bea923fc80b91a2a6cbed6"}, + {file = "watchfiles-0.22.0-cp39-none-win32.whl", hash = "sha256:8c3e3675e6e39dc59b8fe5c914a19d30029e36e9f99468dddffd432d8a7b1c93"}, + {file = "watchfiles-0.22.0-cp39-none-win_amd64.whl", hash = "sha256:25c817ff2a86bc3de3ed2df1703e3d24ce03479b27bb4527c57e722f8554d971"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"}, + {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"}, ] [package.dependencies] @@ -3240,4 +3265,4 @@ local = [] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "33e71f349d108a5bc98b0f8cd0f5c14736202f3db30b0ea1acf588163ef5fbe3" +content-hash = "1dc0dd442df5d174ea85c859208f5cfea9d4785c7b7a93f2c6bf8c92e93d8cad" diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml index c4e11d068..30530c541 100644 --- a/src/backend/base/pyproject.toml +++ b/src/backend/base/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow-base" -version = "0.0.48" +version = "0.0.54" description = "A Python package with a built-in web application" authors = ["Langflow "] maintainers = [ @@ -61,6 +61,8 @@ nest-asyncio = "^1.6.0" emoji = "^2.12.0" cryptography = "^42.0.5" asyncer = "^0.0.5" +pyperclip = "^1.8.2" +uncurl = "^0.0.11" [tool.poetry.extras] diff --git a/example.har b/src/frontend/harFiles/example.har similarity index 100% rename from example.har rename to src/frontend/harFiles/example.har diff --git a/src/frontend/src/App.tsx b/src/frontend/src/App.tsx index 922ec9994..5eee2bfa6 100644 --- a/src/frontend/src/App.tsx +++ 
b/src/frontend/src/App.tsx @@ -1,3 +1,4 @@ +import axios from "axios"; import { useContext, useEffect, useState } from "react"; import { ErrorBoundary } from "react-error-boundary"; import { useNavigate } from "react-router-dom"; @@ -15,6 +16,7 @@ import { } from "./constants/constants"; import { AuthContext } from "./contexts/authContext"; import { autoLogin, getGlobalVariables, getHealth } from "./controllers/API"; +import { setupAxiosDefaults } from "./controllers/API/utils"; import useTrackLastVisitedPath from "./hooks/use-track-last-visited-path"; import Router from "./routes"; import useAlertStore from "./stores/alertStore"; @@ -114,6 +116,7 @@ export default function App() { return new Promise(async (resolve, reject) => { if (isAuthenticated) { try { + await setupAxiosDefaults(); await getFoldersApi(); await getTypes(); await refreshFlows(); diff --git a/src/frontend/src/components/cardComponent/components/dragCardComponent/index.tsx b/src/frontend/src/components/cardComponent/components/dragCardComponent/index.tsx index e4425c61c..28674f3bc 100644 --- a/src/frontend/src/components/cardComponent/components/dragCardComponent/index.tsx +++ b/src/frontend/src/components/cardComponent/components/dragCardComponent/index.tsx @@ -1,7 +1,6 @@ import { storeComponent } from "../../../../types/store"; import { cn } from "../../../../utils/utils"; import ForwardedIconComponent from "../../../genericIconComponent"; -import ShadTooltip from "../../../shadTooltipComponent"; import { Card, CardHeader, CardTitle } from "../../../ui/card"; export default function DragCardComponent({ data }: { data: storeComponent }) { diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx index 1e745f950..0a0b691c8 100644 --- a/src/frontend/src/components/codeTabsComponent/index.tsx +++ b/src/frontend/src/components/codeTabsComponent/index.tsx @@ -841,9 +841,7 @@ export default function CodeTabsComponent({ 
node.data.node!.template[ templateField ].value?.toString() === "{}" - ? { - // yourkey: "value", - } + ? {} : node.data.node! .template[ templateField diff --git a/src/frontend/src/components/dictComponent/index.tsx b/src/frontend/src/components/dictComponent/index.tsx index 2cf622e93..39850e6e3 100644 --- a/src/frontend/src/components/dictComponent/index.tsx +++ b/src/frontend/src/components/dictComponent/index.tsx @@ -12,6 +12,9 @@ export default function DictComponent({ editNode = false, id = "", }: DictComponentType): JSX.Element { + // Create a reference to the value + const ref = useRef(value); + useEffect(() => { if (disabled) { onChange({}); @@ -19,15 +22,14 @@ export default function DictComponent({ }, [disabled]); useEffect(() => { - if (value) onChange(value); + // Update the reference value + ref.current = value; }, [value]); - - const ref = useRef(value); return (
1 && editNode ? "my-1" : "", - "flex flex-col gap-3" + "flex flex-col gap-3", )} > { diff --git a/src/frontend/src/components/editFlowSettingsComponent/index.tsx b/src/frontend/src/components/editFlowSettingsComponent/index.tsx index 94ee4f19e..fc3be2197 100644 --- a/src/frontend/src/components/editFlowSettingsComponent/index.tsx +++ b/src/frontend/src/components/editFlowSettingsComponent/index.tsx @@ -9,11 +9,14 @@ export const EditFlowSettings: React.FC = ({ name, invalidNameList, description, + endpointName, maxLength = 50, setName, setDescription, + setEndpointName, }: InputProps): JSX.Element => { const [isMaxLength, setIsMaxLength] = useState(false); + const [isEndpointNameValid, setIsEndpointNameValid] = useState(true); const handleNameChange = (event: ChangeEvent) => { const { value } = event.target; @@ -29,6 +32,18 @@ export const EditFlowSettings: React.FC = ({ setDescription!(event.target.value); }; + const handleEndpointNameChange = (event: ChangeEvent) => { + // Validate the endpoint name + // use this regex r'^[a-zA-Z0-9_-]+$' + const isValid = + (/^[a-zA-Z0-9_-]+$/.test(event.target.value) && + event.target.value.length <= maxLength) || + // empty is also valid + event.target.value.length === 0; + setIsEndpointNameValid(isValid); + setEndpointName!(event.target.value); + }; + //this function is necessary to select the text when double clicking, this was not working with the onFocus event const handleFocus = (event) => event.target.select(); @@ -84,13 +99,39 @@ export const EditFlowSettings: React.FC = ({ {description === "" ? "No description" : description} )} + {setEndpointName && ( + + )} ); }; diff --git a/src/frontend/src/components/headerComponent/index.tsx b/src/frontend/src/components/headerComponent/index.tsx index 4f6c02bd8..0abd16d3d 100644 --- a/src/frontend/src/components/headerComponent/index.tsx +++ b/src/frontend/src/components/headerComponent/index.tsx @@ -181,7 +181,7 @@ export default function Header(): JSX.Element { />
- {!autoLogin && ( + {autoLogin && ( - {/* + - */} +